Diffstat
-rw-r--r--testing/web-platform/tests/media-capabilities/META.yml3
-rw-r--r--testing/web-platform/tests/media-capabilities/README.md14
-rw-r--r--testing/web-platform/tests/media-capabilities/decodingInfo.any.js406
-rw-r--r--testing/web-platform/tests/media-capabilities/decodingInfo.webrtc.html217
-rw-r--r--testing/web-platform/tests/media-capabilities/decodingInfoEncryptedMedia.http.html31
-rw-r--r--testing/web-platform/tests/media-capabilities/decodingInfoEncryptedMedia.https.html262
-rw-r--r--testing/web-platform/tests/media-capabilities/encodingInfo.any.js310
-rw-r--r--testing/web-platform/tests/media-capabilities/encodingInfo.webrtc.html217
-rw-r--r--testing/web-platform/tests/media-capabilities/idlharness.any.js25
-rw-r--r--testing/web-platform/tests/media-playback-quality/META.yml3
-rw-r--r--testing/web-platform/tests/media-playback-quality/idlharness.window.js20
-rw-r--r--testing/web-platform/tests/media-source/META.yml3
-rw-r--r--testing/web-platform/tests/media-source/SourceBuffer-abort-readyState.html72
-rw-r--r--testing/web-platform/tests/media-source/SourceBuffer-abort-removed.html52
-rw-r--r--testing/web-platform/tests/media-source/SourceBuffer-abort-updating.html92
-rw-r--r--testing/web-platform/tests/media-source/SourceBuffer-abort.html34
-rw-r--r--testing/web-platform/tests/media-source/URL-createObjectURL-null.html19
-rw-r--r--testing/web-platform/tests/media-source/URL-createObjectURL-revoke.html59
-rw-r--r--testing/web-platform/tests/media-source/URL-createObjectURL.html20
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-message-util.js16
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-detach-element.html75
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-detach-element.js79
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-duration.html86
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-duration.js290
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-get-objecturl.js13
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer-to-main.js10
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer.html316
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer.js19
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle.html61
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle.js70
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-must-fail-if-unsupported.js18
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-objecturl.html52
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-objecturl.js33
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play-terminate-worker.html85
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play-terminate-worker.js15
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play.html49
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play.js74
-rw-r--r--testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-util.js60
-rwxr-xr-xtesting/web-platform/tests/media-source/generate-config-change-tests.py225
-rw-r--r--testing/web-platform/tests/media-source/idlharness.window.js35
-rwxr-xr-xtesting/web-platform/tests/media-source/import_tests.sh52
-rw-r--r--testing/web-platform/tests/media-source/manifest.txt55
-rw-r--r--testing/web-platform/tests/media-source/mediasource-activesourcebuffers.html238
-rw-r--r--testing/web-platform/tests/media-source/mediasource-addsourcebuffer-mode.html31
-rw-r--r--testing/web-platform/tests/media-source/mediasource-addsourcebuffer.html133
-rw-r--r--testing/web-platform/tests/media-source/mediasource-append-buffer.html623
-rw-r--r--testing/web-platform/tests/media-source/mediasource-appendbuffer-quota-exceeded.html75
-rw-r--r--testing/web-platform/tests/media-source/mediasource-appendwindow.html176
-rw-r--r--testing/web-platform/tests/media-source/mediasource-attach-stops-delaying-load-event.html49
-rw-r--r--testing/web-platform/tests/media-source/mediasource-avtracks.html124
-rw-r--r--testing/web-platform/tests/media-source/mediasource-buffered.html233
-rw-r--r--testing/web-platform/tests/media-source/mediasource-changetype-play-implicit.html89
-rw-r--r--testing/web-platform/tests/media-source/mediasource-changetype-play-negative.html122
-rw-r--r--testing/web-platform/tests/media-source/mediasource-changetype-play-without-codecs-parameter.html52
-rw-r--r--testing/web-platform/tests/media-source/mediasource-changetype-play.html48
-rw-r--r--testing/web-platform/tests/media-source/mediasource-changetype-util.js359
-rw-r--r--testing/web-platform/tests/media-source/mediasource-changetype.html149
-rw-r--r--testing/web-platform/tests/media-source/mediasource-closed.html140
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-mp4-a-bitrate.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-audio-bitrate.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-framesize.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-video-bitrate.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-bitrate.html17
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-framerate.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-framesize.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-webm-a-bitrate.html17
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-webm-av-audio-bitrate.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-webm-av-framesize.html17
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-webm-av-video-bitrate.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-webm-v-bitrate.html17
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-webm-v-framerate.html18
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-change-webm-v-framesize.html17
-rw-r--r--testing/web-platform/tests/media-source/mediasource-config-changes.js116
-rw-r--r--testing/web-platform/tests/media-source/mediasource-correct-frames-after-reappend.html162
-rw-r--r--testing/web-platform/tests/media-source/mediasource-correct-frames.html146
-rw-r--r--testing/web-platform/tests/media-source/mediasource-detach.html54
-rw-r--r--testing/web-platform/tests/media-source/mediasource-duration-boundaryconditions.html63
-rw-r--r--testing/web-platform/tests/media-source/mediasource-duration.html383
-rw-r--r--testing/web-platform/tests/media-source/mediasource-endofstream-invaliderror.html53
-rw-r--r--testing/web-platform/tests/media-source/mediasource-endofstream.html73
-rw-r--r--testing/web-platform/tests/media-source/mediasource-errors.html273
-rw-r--r--testing/web-platform/tests/media-source/mediasource-getvideoplaybackquality.html69
-rw-r--r--testing/web-platform/tests/media-source/mediasource-h264-play-starved.html57
-rw-r--r--testing/web-platform/tests/media-source/mediasource-invalid-codec.html45
-rw-r--r--testing/web-platform/tests/media-source/mediasource-is-type-supported.html106
-rw-r--r--testing/web-platform/tests/media-source/mediasource-liveseekable.html137
-rw-r--r--testing/web-platform/tests/media-source/mediasource-multiple-attach.html114
-rw-r--r--testing/web-platform/tests/media-source/mediasource-play-then-seek-back.html57
-rw-r--r--testing/web-platform/tests/media-source/mediasource-play.html61
-rw-r--r--testing/web-platform/tests/media-source/mediasource-preload.html72
-rw-r--r--testing/web-platform/tests/media-source/mediasource-redundant-seek.html73
-rw-r--r--testing/web-platform/tests/media-source/mediasource-remove.html324
-rw-r--r--testing/web-platform/tests/media-source/mediasource-removesourcebuffer.html146
-rw-r--r--testing/web-platform/tests/media-source/mediasource-replay.html41
-rw-r--r--testing/web-platform/tests/media-source/mediasource-seek-beyond-duration.html105
-rw-r--r--testing/web-platform/tests/media-source/mediasource-seek-during-pending-seek.html189
-rw-r--r--testing/web-platform/tests/media-source/mediasource-seekable.html67
-rw-r--r--testing/web-platform/tests/media-source/mediasource-sequencemode-append-buffer.html137
-rw-r--r--testing/web-platform/tests/media-source/mediasource-sourcebuffer-mode-timestamps.html52
-rw-r--r--testing/web-platform/tests/media-source/mediasource-sourcebuffer-mode.html142
-rw-r--r--testing/web-platform/tests/media-source/mediasource-sourcebuffer-trackdefaults.html78
-rw-r--r--testing/web-platform/tests/media-source/mediasource-sourcebufferlist.html97
-rw-r--r--testing/web-platform/tests/media-source/mediasource-timestamp-offset.html125
-rw-r--r--testing/web-platform/tests/media-source/mediasource-trackdefault.html101
-rw-r--r--testing/web-platform/tests/media-source/mediasource-trackdefaultlist.html60
-rw-r--r--testing/web-platform/tests/media-source/mediasource-util.js412
-rw-r--r--testing/web-platform/tests/media-source/mp3/sound_5.mp3bin0 -> 23442 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/h264-starvation-init.mp4bin0 -> 783 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/h264-starvation-media.mp4bin0 -> 172298 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/invalid-codec.mp4bin0 -> 1542 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-a-128k-44100Hz-1ch-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-a-128k-44100Hz-1ch.mp4bin0 -> 17408 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-a-192k-44100Hz-1ch-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-a-192k-44100Hz-1ch.mp4bin0 -> 17685 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.mp4bin0 -> 81565 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.mp4bin0 -> 69474 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.mp4bin0 -> 69948 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.mp4bin0 -> 95171 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-boxes-audio.mp4bin0 -> 64208 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-boxes-video.mp4bin0 -> 344085 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-24fps-8kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-24fps-8kfr.mp4bin0 -> 38738 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-30fps-10kfr.mp4bin0 -> 34009 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-v-128k-640x480-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-v-128k-640x480-30fps-10kfr.mp4bin0 -> 27764 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-v-256k-320x240-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/mp4/test-v-256k-320x240-30fps-10kfr.mp4bin0 -> 55293 bytes
-rw-r--r--testing/web-platform/tests/media-source/mp4/test.mp4bin0 -> 187227 bytes
-rw-r--r--testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/mediasource-webcodecs-addsourcebuffer.html203
-rw-r--r--testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/mediasource-webcodecs-appendencodedchunks-play.html83
-rw-r--r--testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/vp9.mp4bin0 -> 6159 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/invalid-codec.webmbin0 -> 1206 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-a-128k-44100Hz-1ch-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-a-128k-44100Hz-1ch.webmbin0 -> 9840 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-a-192k-44100Hz-1ch-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-a-192k-44100Hz-1ch.webmbin0 -> 10735 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webmbin0 -> 76501 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.webmbin0 -> 80692 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.webmbin0 -> 81467 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.webmbin0 -> 132509 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-v-128k-320x240-24fps-8kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-v-128k-320x240-24fps-8kfr.webmbin0 -> 38195 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-v-128k-320x240-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-v-128k-320x240-30fps-10kfr.webmbin0 -> 39228 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-v-128k-640x480-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-v-128k-640x480-30fps-10kfr.webmbin0 -> 48190 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-v-256k-320x240-30fps-10kfr-manifest.json4
-rw-r--r--testing/web-platform/tests/media-source/webm/test-v-256k-320x240-30fps-10kfr.webmbin0 -> 64318 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-vp8-vorbis-webvtt.webmbin0 -> 143662 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test-vp9.webmbin0 -> 44353 bytes
-rw-r--r--testing/web-platform/tests/media-source/webm/test.webmbin0 -> 190970 bytes
-rw-r--r--testing/web-platform/tests/media/1x1-green.pngbin0 -> 135 bytes
-rw-r--r--testing/web-platform/tests/media/2048x1360-random.jpgbin0 -> 4520440 bytes
-rw-r--r--testing/web-platform/tests/media/2x2-green.mp4bin0 -> 3503 bytes
-rw-r--r--testing/web-platform/tests/media/2x2-green.ogvbin0 -> 7660 bytes
-rw-r--r--testing/web-platform/tests/media/400x300-red-resize-200x150-green.mp4bin0 -> 17425 bytes
-rw-r--r--testing/web-platform/tests/media/400x300-red-resize-200x150-green.webmbin0 -> 11901 bytes
-rw-r--r--testing/web-platform/tests/media/A4.mp4bin0 -> 53409 bytes
-rw-r--r--testing/web-platform/tests/media/A4.ogvbin0 -> 94372 bytes
-rw-r--r--testing/web-platform/tests/media/META.yml2
-rw-r--r--testing/web-platform/tests/media/counting.mp4bin0 -> 311336 bytes
-rw-r--r--testing/web-platform/tests/media/counting.ogvbin0 -> 187773 bytes
-rw-r--r--testing/web-platform/tests/media/foo-no-cors.vtt4
-rw-r--r--testing/web-platform/tests/media/foo.vtt4
-rw-r--r--testing/web-platform/tests/media/foo.vtt.headers1
-rw-r--r--testing/web-platform/tests/media/green-at-15.mp4bin0 -> 299193 bytes
-rw-r--r--testing/web-platform/tests/media/green-at-15.ogvbin0 -> 287648 bytes
-rw-r--r--testing/web-platform/tests/media/movie_300.mp4bin0 -> 2757913 bytes
-rw-r--r--testing/web-platform/tests/media/movie_300.ogvbin0 -> 2344665 bytes
-rw-r--r--testing/web-platform/tests/media/movie_5.mp4bin0 -> 31603 bytes
-rw-r--r--testing/web-platform/tests/media/movie_5.ogvbin0 -> 18645 bytes
-rw-r--r--testing/web-platform/tests/media/poster.pngbin0 -> 14109 bytes
-rw-r--r--testing/web-platform/tests/media/sine440.mp3bin0 -> 80666 bytes
-rw-r--r--testing/web-platform/tests/media/sound_0.mp3bin0 -> 539 bytes
-rw-r--r--testing/web-platform/tests/media/sound_0.ogabin0 -> 4239 bytes
-rw-r--r--testing/web-platform/tests/media/sound_5.mp3bin0 -> 23442 bytes
-rw-r--r--testing/web-platform/tests/media/sound_5.ogabin0 -> 18541 bytes
-rw-r--r--testing/web-platform/tests/media/test-1s.mp4bin0 -> 13932 bytes
-rw-r--r--testing/web-platform/tests/media/test-1s.webmbin0 -> 23171 bytes
-rw-r--r--testing/web-platform/tests/media/test-a-128k-44100Hz-1ch.webmbin0 -> 9840 bytes
-rw-r--r--testing/web-platform/tests/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webmbin0 -> 76501 bytes
-rw-r--r--testing/web-platform/tests/media/test-v-128k-320x240-24fps-8kfr.webmbin0 -> 38195 bytes
-rw-r--r--testing/web-platform/tests/media/test.mp4bin0 -> 192844 bytes
-rw-r--r--testing/web-platform/tests/media/test.ogvbin0 -> 146510 bytes
-rw-r--r--testing/web-platform/tests/media/video.ogvbin0 -> 53189 bytes
-rw-r--r--testing/web-platform/tests/media/white.mp4bin0 -> 13713 bytes
-rw-r--r--testing/web-platform/tests/media/white.webmbin0 -> 10880 bytes
-rw-r--r--testing/web-platform/tests/mediacapture-extensions/GUM-backgroundBlur.https.html150
-rw-r--r--testing/web-platform/tests/mediacapture-fromelement/HTMLCanvasElement-getImageData-noframe.html30
-rw-r--r--testing/web-platform/tests/mediacapture-fromelement/META.yml5
-rw-r--r--testing/web-platform/tests/mediacapture-fromelement/capture.html38
-rw-r--r--testing/web-platform/tests/mediacapture-fromelement/creation.html46
-rw-r--r--testing/web-platform/tests/mediacapture-fromelement/cross-origin.html41
-rw-r--r--testing/web-platform/tests/mediacapture-fromelement/ended.html41
-rw-r--r--testing/web-platform/tests/mediacapture-fromelement/historical.html17
-rw-r--r--testing/web-platform/tests/mediacapture-fromelement/idlharness.window.js38
-rw-r--r--testing/web-platform/tests/mediacapture-handle/identity/MediaDevices-setCaptureHandleConfig.https.window.js54
-rw-r--r--testing/web-platform/tests/mediacapture-image/ImageCapture-MediaTrackSupportedConstraints.https.html29
-rw-r--r--testing/web-platform/tests/mediacapture-image/ImageCapture-creation.https.html100
-rw-r--r--testing/web-platform/tests/mediacapture-image/ImageCapture-grabFrame-crash.html11
-rw-r--r--testing/web-platform/tests/mediacapture-image/ImageCapture-grabFrame.html46
-rw-r--r--testing/web-platform/tests/mediacapture-image/ImageCapture-track.html31
-rw-r--r--testing/web-platform/tests/mediacapture-image/META.yml4
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-fast.html66
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-getSettings.https.html119
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-reject.https.html78
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints.https.html112
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-clone.https.html362
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getCapabilities-fast.html29
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getCapabilities.https.html159
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getConstraints.https.html63
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getSettings-fast.html29
-rw-r--r--testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getSettings.https.html89
-rw-r--r--testing/web-platform/tests/mediacapture-image/detached-HTMLCanvasElement.html26
-rw-r--r--testing/web-platform/tests/mediacapture-image/getPhotoCapabilities.html73
-rw-r--r--testing/web-platform/tests/mediacapture-image/getPhotoSettings.html61
-rw-r--r--testing/web-platform/tests/mediacapture-image/getusermedia.https.html26
-rw-r--r--testing/web-platform/tests/mediacapture-image/idlharness.window.js25
-rw-r--r--testing/web-platform/tests/mediacapture-image/resources/imagecapture-helpers.js55
-rw-r--r--testing/web-platform/tests/mediacapture-image/setOptions-reject.html51
-rw-r--r--testing/web-platform/tests/mediacapture-image/takePhoto-with-PhotoSettings.html63
-rw-r--r--testing/web-platform/tests/mediacapture-image/takePhoto-without-PhotoCapabilities.https.window.js6
-rw-r--r--testing/web-platform/tests/mediacapture-image/takePhoto.html61
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-audio.https.html97
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-service-worker.https.html24
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-shared-worker.https.html22
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-worker.https.html39
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-pipes-data-in-worker.https.html41
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-video.https.html285
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-audio.https.html54
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-backpressure.https.html69
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-video.https.html97
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-worker.js17
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/VideoTrackGenerator.https.html327
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/dedicated-worker.js11
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/service-worker.js8
-rw-r--r--testing/web-platform/tests/mediacapture-insertable-streams/shared-worker.js11
-rw-r--r--testing/web-platform/tests/mediacapture-record/BlobEvent-constructor.html38
-rw-r--r--testing/web-platform/tests/mediacapture-record/META.yml3
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-bitrate.https.html230
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-canvas-media-source.https.html128
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-creation.https.html59
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-destroy-script-execution.html79
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-detached-context.html26
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-disabled-tracks.https.html56
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-error.html62
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-events-and-exceptions.html108
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-mimetype.html205
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-pause-resume.html89
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-peerconnection-no-sink.https.html47
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-peerconnection.https.html86
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-start.html25
-rw-r--r--testing/web-platform/tests/mediacapture-record/MediaRecorder-stop.html151
-rw-r--r--testing/web-platform/tests/mediacapture-record/idlharness.window.js40
-rw-r--r--testing/web-platform/tests/mediacapture-record/passthrough/MediaRecorder-passthrough.https.html74
-rw-r--r--testing/web-platform/tests/mediacapture-record/support/MediaRecorder-iframe.html20
-rw-r--r--testing/web-platform/tests/mediacapture-record/utils/peerconnection.js141
-rw-r--r--testing/web-platform/tests/mediacapture-record/utils/sources.js75
-rw-r--r--testing/web-platform/tests/mediacapture-region/CropTarget-fromElement.https.html92
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-api.https.html22
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-deny.https.html35
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-empty-option-param.https.html34
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-impossible-constraint.https.html37
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-invalid-facing-mode.https.html31
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-non-applicable-constraint.https.html77
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-optional-constraint.https.html32
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-required-constraint-with-ideal-value.https.html33
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-trivial-constraint.https.html32
-rw-r--r--testing/web-platform/tests/mediacapture-streams/GUM-unknownkey-option-param.https.html31
-rw-r--r--testing/web-platform/tests/mediacapture-streams/META.yml5
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-SecureContext.html19
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-after-discard.https.html64
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-camera.https.html30
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-camera.https.html.headers1
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-mic.https.html30
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-mic.https.html.headers1
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-per-origin-ids.sub.https.html87
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-persistent-permission.https.html38
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-returned-objects.https.html59
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices.https.html64
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-getSupportedConstraints.https.html48
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaDevices-getUserMedia.https.html126
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-firstframe.https.html106
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-preload-none.https.html84
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-srcObject.https.html476
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-add-audio-track.https.html42
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-audio-only.https.html32
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-clone.https.html98
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-default-feature-policy.https.html84
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-finished-add.https.html35
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-gettrackid.https.html29
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-id.https.html30
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-idl.https.html77
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-removetrack.https.html140
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-supported-by-feature-policy.html15
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStream-video-only.https.html32
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-MediaElement-disabled-audio-is-silence.https.html59
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-MediaElement-disabled-video-is-black.https.html56
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-applyConstraints.https.html83
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-end-manual.https.html54
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-getCapabilities.https.html153
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-getSettings.https.html222
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-id.https.html27
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-iframe-audio-transfer.https.html30
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-iframe-transfer.https.html30
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-init.https.html39
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-transfer-video.https.html26
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-transfer.https.html50
-rw-r--r--testing/web-platform/tests/mediacapture-streams/MediaStreamTrackEvent-constructor.https.html42
-rw-r--r--testing/web-platform/tests/mediacapture-streams/crashtests/enumerateDevices-after-discard-1.https.html18
-rw-r--r--testing/web-platform/tests/mediacapture-streams/enumerateDevices-with-navigation.https.html77
-rw-r--r--testing/web-platform/tests/mediacapture-streams/historical.https.html33
-rw-r--r--testing/web-platform/tests/mediacapture-streams/idlharness.https.window.js50
-rw-r--r--testing/web-platform/tests/mediacapture-streams/iframe-enumerate-cleared.html2
-rw-r--r--testing/web-platform/tests/mediacapture-streams/iframe-enumerate-cleared.html.headers1
-rw-r--r--testing/web-platform/tests/mediacapture-streams/iframe-enumerate.html2
-rw-r--r--testing/web-platform/tests/mediacapture-streams/message-enumerateddevices.js8
-rw-r--r--testing/web-platform/tests/mediacapture-streams/parallel-capture-requests.https.html57
-rw-r--r--testing/web-platform/tests/mediacapture-streams/permission-helper.js24
-rw-r--r--testing/web-platform/tests/mediacapture-streams/support/iframe-MediaStreamTrack-transfer-video.html27
-rw-r--r--testing/web-platform/tests/mediacapture-streams/support/iframe-MediaStreamTrack-transfer.html21
-rw-r--r--testing/web-platform/tests/mediasession/META.yml3
-rw-r--r--testing/web-platform/tests/mediasession/README.md20
-rw-r--r--testing/web-platform/tests/mediasession/helper/artwork-generator.html18
-rw-r--r--testing/web-platform/tests/mediasession/idlharness.window.js18
-rw-r--r--testing/web-platform/tests/mediasession/mediametadata.html219
-rw-r--r--testing/web-platform/tests/mediasession/playbackstate.html27
-rw-r--r--testing/web-platform/tests/mediasession/positionstate.html106
-rw-r--r--testing/web-platform/tests/mediasession/setactionhandler.html34
-rw-r--r--testing/web-platform/tests/mediasession/setcameraactive.html12
-rw-r--r--testing/web-platform/tests/mediasession/setmicrophoneactive.html12
340 files changed, 20148 insertions, 0 deletions
diff --git a/testing/web-platform/tests/media-capabilities/META.yml b/testing/web-platform/tests/media-capabilities/META.yml
new file mode 100644
index 0000000000..2bd00efb9a
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/META.yml
@@ -0,0 +1,3 @@
+spec: https://w3c.github.io/media-capabilities/
+suggested_reviewers:
+ - mounirlamouri
diff --git a/testing/web-platform/tests/media-capabilities/README.md b/testing/web-platform/tests/media-capabilities/README.md
new file mode 100644
index 0000000000..cfe994976c
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/README.md
@@ -0,0 +1,14 @@
+# Media Capabilities Specification Tests
+
+The Media Capabilities specification is available here: https://wicg.github.io/media-capabilities
+
+GitHub repository: https://github.com/WICG/media-capabilities
+
+File an issue: https://github.com/wicg/media-capabilities/issues/new
+
+## Status of these tests
+
+These tests are still at an early stage. The specification is still a work in
+progress, and the tests try to reflect its current state as closely as
+possible. Please file issues if there are inconsistencies between the
+specification and the tests or if tests are obviously missing.
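For orientation, here is a minimal sketch of the query pattern these tests exercise (the configuration shapes follow the Media Capabilities spec and the minimal configurations used in the tests below; actual results vary by user agent):

```js
// Sketch: query decode capabilities for a WebM (VP9 video + Opus audio) file.
navigator.mediaCapabilities.decodingInfo({
  type: 'file',
  video: {
    contentType: 'video/webm; codecs="vp09.00.10.08"',
    width: 800,
    height: 600,
    bitrate: 3000,    // bits per second
    framerate: 24,
  },
  audio: { contentType: 'audio/webm; codecs="opus"' },
}).then(info => {
  // MediaCapabilitiesDecodingInfo: supported/smooth/powerEfficient booleans,
  // plus keySystemAccess (null unless a keySystemConfiguration was supplied).
  console.log(info.supported, info.smooth, info.powerEfficient);
});
```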
diff --git a/testing/web-platform/tests/media-capabilities/decodingInfo.any.js b/testing/web-platform/tests/media-capabilities/decodingInfo.any.js
new file mode 100644
index 0000000000..061a226e3f
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/decodingInfo.any.js
@@ -0,0 +1,406 @@
+// META: timeout=long
+'use strict';
+
+// Minimal VideoConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalVideoConfiguration = {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+};
+
+// Minimal AudioConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalAudioConfiguration = {
+ contentType: 'audio/webm; codecs="opus"',
+};
+
+// AudioConfiguration with optional spatialRendering param.
+var audioConfigurationWithSpatialRendering = {
+ contentType: 'audio/webm; codecs="opus"',
+ spatialRendering: true,
+};
+
+// VideoConfiguration with optional hdrMetadataType, colorGamut, and
+// transferFunction properties.
+var videoConfigurationWithDynamicRange = {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ hdrMetadataType: "smpteSt2086",
+ colorGamut: "srgb",
+ transferFunction: "srgb",
+};
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo());
+}, "Test that decodingInfo rejects if it doesn't get a configuration");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({}));
+}, "Test that decodingInfo rejects if the MediaConfiguration isn't valid");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }));
+}, "Test that decodingInfo rejects if the MediaConfiguration does not have a type");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ }));
+}, "Test that decodingInfo rejects if the configuration doesn't have an audio or video field");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: -1,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has a negative framerate");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 0,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has a framerate set to 0");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: Infinity,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has a framerate set to Infinity");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'fgeoa',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration contentType doesn't parse");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'audio/fgeoa',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration contentType isn't of type video");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'application/ogg; codec=vorbis',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration contentType is of type audio");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: {
+ contentType: 'application/ogg; codec=theora',
+ channels: 2,
+ },
+ }));
+}, "Test that decodingInfo rejects if the audio configuration contentType is of type video");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"; foo="bar"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration contentType has more than one parameter");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; foo="bar"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration contentType has one parameter that isn't codecs");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '24000/1001',
+ }
+ }));
+}, "Test that decodingInfo() rejects framerate in the form of x/y");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '24000/0',
+ }
+ }));
+}, "Test that decodingInfo() rejects framerate in the form of x/0");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '0/10001',
+ }
+ }));
+}, "Test that decodingInfo() rejects framerate in the form of 0/y");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '-24000/10001',
+ }
+ }));
+}, "Test that decodingInfo() rejects framerate in the form of -x/y");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '24000/-10001',
+ }
+ }));
+}, "Test that decodingInfo() rejects framerate in the form of x/-y");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '24000/',
+ }
+ }));
+}, "Test that decodingInfo() rejects framerate in the form of x/");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '1/3x',
+ }
+ }));
+}, "Test that decodingInfo() rejects framerate with trailing unallowed characters");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: { contentType: 'fgeoa' },
+ }));
+}, "Test that decodingInfo rejects if the audio configuration contenType doesn't parse");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: { contentType: 'video/fgeoa' },
+ }));
+}, "Test that decodingInfo rejects if the audio configuration contentType isn't of type audio");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: { contentType: 'audio/webm; codecs="opus"; foo="bar"' },
+ }));
+}, "Test that decodingInfo rejects if the audio configuration contentType has more than one parameters");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: { contentType: 'audio/webm; foo="bar"' },
+ }));
+}, "Test that decodingInfo rejects if the audio configuration contentType has one parameter that isn't codecs");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }).then(ability => {
+ assert_equals(typeof ability.supported, "boolean");
+ assert_equals(typeof ability.smooth, "boolean");
+ assert_equals(typeof ability.powerEfficient, "boolean");
+ assert_equals(typeof ability.keySystemAccess, "object");
+ });
+}, "Test that decodingInfo returns a valid MediaCapabilitiesInfo objects");
+
+async_test(t => {
+ var validTypes = [ 'file', 'media-source' ];
+ var invalidTypes = [ undefined, null, '', 'foobar', 'mse', 'MediaSource',
+ 'record', 'transmission' ];
+
+ var validPromises = [];
+ var invalidCaught = 0;
+
+ validTypes.forEach(type => {
+ validPromises.push(navigator.mediaCapabilities.decodingInfo({
+ type: type,
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }));
+ });
+
+ // validTypes are tested via Promise.all(validPromises) because if one of the
+ // promises fail, Promise.all() will reject. This mechanism can't be used for
+ // invalid types which will be tested individually and increment invalidCaught
+ // when rejected until the amount of rejection matches the expectation.
+ Promise.all(validPromises).then(t.step_func(() => {
+ for (var i = 0; i < invalidTypes.length; ++i) {
+ navigator.mediaCapabilities.decodingInfo({
+ type: invalidTypes[i],
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }).then(t.unreached_func(), t.step_func(e => {
+ assert_equals(e.name, 'TypeError');
+ ++invalidCaught;
+ if (invalidCaught == invalidTypes.length)
+ t.done();
+ }));
+ }
+ }), t.unreached_func('Promise.all should not reject for valid types'));
+}, "Test that decodingInfo rejects if the MediaConfiguration does not have a valid type");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: audioConfigurationWithSpatialRendering,
+ }).then(ability => {
+ assert_equals(typeof ability.supported, "boolean");
+ assert_equals(typeof ability.smooth, "boolean");
+ assert_equals(typeof ability.powerEfficient, "boolean");
+ assert_equals(typeof ability.keySystemAccess, "object");
+ });
+}, "Test that decodingInfo with spatialRendering set returns a valid MediaCapabilitiesInfo objects");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: videoConfigurationWithDynamicRange,
+ }).then(ability => {
+ assert_equals(typeof ability.supported, "boolean");
+ assert_equals(typeof ability.smooth, "boolean");
+ assert_equals(typeof ability.powerEfficient, "boolean");
+ assert_equals(typeof ability.keySystemAccess, "object");
+ });
+}, "Test that decodingInfo with hdrMetadataType, colorGamut, and transferFunction set returns a valid MediaCapabilitiesInfo objects");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ hdrMetadataType: ""
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has an empty hdrMetadataType");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ colorGamut: true
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has a colorGamut set to true");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ transferFunction: 3
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has a transferFunction set to 3");
diff --git a/testing/web-platform/tests/media-capabilities/decodingInfo.webrtc.html b/testing/web-platform/tests/media-capabilities/decodingInfo.webrtc.html
new file mode 100644
index 0000000000..f283956100
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/decodingInfo.webrtc.html
@@ -0,0 +1,217 @@
+<!DOCTYPE html>
+<title>MediaCapabilities.decodingInfo() for webrtc</title>
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+// Minimal VideoConfiguration that will be allowed per spec. All optional
+// properties are missing.
+const minimalVideoConfiguration = {
+ contentType: 'video/VP9; profile-level="0"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+};
+
+// Minimal AudioConfiguration that will be allowed per spec. All optional
+// properties are missing.
+const minimalAudioConfiguration = {
+ contentType: 'audio/opus',
+};
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ }));
+}, "Test that decodingInfo rejects if the configuration doesn't have an audio or video field");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'video/VP9',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: -1,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has a negative framerate");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'video/VP9"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 0,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has a framerate set to 0");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'video/VP9"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: Infinity,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration has a framerate set to Infinity");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'fgeoa',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration contentType doesn't parse");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'audio/fgeoa',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that decodingInfo rejects if the video configuration contentType isn't of type video");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ audio: { contentType: 'fgeoa' },
+ }));
+}, "Test that decodingInfo rejects if the audio configuration contentType doesn't parse");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ audio: { contentType: 'video/fgeoa' },
+ }));
+}, "Test that decodingInfo rejects if the audio configuration contentType isn't of type audio");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }).then(ability => {
+ assert_equals(typeof ability.supported, "boolean");
+ assert_equals(typeof ability.smooth, "boolean");
+ assert_equals(typeof ability.powerEfficient, "boolean");
+ });
+}, "Test that decodingInfo returns a valid MediaCapabilitiesInfo objects");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ audio: minimalAudioConfiguration,
+ }).then(ability => {
+ assert_false(ability.supported);
+ assert_false(ability.smooth);
+ assert_false(ability.powerEfficient);
+ });
+}, "Test that decodingInfo returns supported, smooth, and powerEfficient set to false for non-webrtc video content type.");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: minimalVideoConfiguration,
+ audio: {
+ contentType: 'audio/webm; codecs="opus"',
+ },
+ }).then(ability => {
+ assert_false(ability.supported);
+ assert_false(ability.smooth);
+ assert_false(ability.powerEfficient);
+ });
+}, "Test that decodingInfo returns supported, smooth, and powerEfficient set to false for non-webrtc audio content type.");
+
+const validAudioCodecs = (() => {
+ // Some codecs that are returned by getCapabilities() are not real codecs,
+ // exclude these from the test.
+ const excludeList = [ 'audio/CN', 'audio/telephone-event', 'audio/red' ];
+ const audioCodecs = [];
+ RTCRtpReceiver.getCapabilities("audio")['codecs'].forEach(codec => {
+ if (excludeList.indexOf(codec.mimeType) < 0 &&
+ audioCodecs.indexOf(codec.mimeType) < 0) {
+ audioCodecs.push(codec.mimeType);
+ }
+ });
+ return audioCodecs;
+})();
+
+validAudioCodecs.forEach(codec => {
+ promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ audio: {
+ contentType: codec
+ }
+ }).then(ability => {
+ assert_true(ability.supported);
+ });
+}, "Test that decodingInfo returns supported true for the codec " + codec + " returned by RTCRtpReceiver.getCapabilities()")}
+);
+
+const validVideoCodecs = (() => {
+ // Some codecs that are returned by getCapabilities() are not real codecs but
+ // only used for error correction, exclude these from the test.
+ const excludeList = [ 'video/rtx', 'video/red', 'video/ulpfec',
+ 'video/flexfec-03' ];
+ const videoCodecs = [];
+
+ RTCRtpReceiver.getCapabilities("video")['codecs'].forEach(codec => {
+ if (excludeList.indexOf(codec.mimeType) < 0) {
+ let mimeType = codec.mimeType;
+ if ('sdpFmtpLine' in codec) {
+ mimeType += "; " + codec.sdpFmtpLine;
+ }
+      if (videoCodecs.indexOf(mimeType) < 0) {
+ videoCodecs.push(mimeType);
+ }
+ }
+ });
+ return videoCodecs;
+})();
+
+validVideoCodecs.forEach(codec => {
+ promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: codec,
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ }
+ }).then(ability => {
+ assert_true(ability.supported);
+ });
+}, "Test that decodingInfo returns supported true for the codec " + codec + " returned by RTCRtpReceiver.getCapabilities()")}
+);
+
+</script>
diff --git a/testing/web-platform/tests/media-capabilities/decodingInfoEncryptedMedia.http.html b/testing/web-platform/tests/media-capabilities/decodingInfoEncryptedMedia.http.html
new file mode 100644
index 0000000000..267b23431b
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/decodingInfoEncryptedMedia.http.html
@@ -0,0 +1,31 @@
+<!DOCTYPE html>
+<title>MediaCapabilities.decodingInfo() for encrypted media (non-secure context)</title>
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+// Minimal VideoConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalVideoConfiguration = {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+};
+
+// Minimal MediaCapabilitiesKeySystemConfiguration that will be allowed per
+// spec. All optional properties are missing.
+var minimalKeySystemConfiguration = {
+ keySystem: 'org.w3.clearkey'
+};
+
+promise_test(t => {
+ return promise_rejects_dom(t, 'SecurityError', navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: minimalKeySystemConfiguration,
+ }));
+}, "Test that decodingInfo() with a keySystemConfiguration fails on a non-secure context.");
+
+</script>
diff --git a/testing/web-platform/tests/media-capabilities/decodingInfoEncryptedMedia.https.html b/testing/web-platform/tests/media-capabilities/decodingInfoEncryptedMedia.https.html
new file mode 100644
index 0000000000..7ac914de89
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/decodingInfoEncryptedMedia.https.html
@@ -0,0 +1,262 @@
+<!DOCTYPE html>
+<title>MediaCapabilities.decodingInfo() for encrypted media</title>
+<meta name="timeout" content="long">
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+// Minimal VideoConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalVideoConfiguration = {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+};
+
+// Minimal AudioConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalAudioConfiguration = {
+ contentType: 'audio/webm; codecs="opus"',
+};
+
+// Minimal MediaCapabilitiesKeySystemConfiguration that will be allowed per
+// spec. All optional properties are missing.
+var minimalKeySystemConfiguration = {
+ keySystem: 'org.w3.clearkey',
+};
+
+// Config with bogus name not provided by any UA.
+var bogusKeySystemConfiguration = {
+ keySystem: 'bogus',
+};
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: minimalKeySystemConfiguration,
+ });
+}, "Test that decodingInfo() accepts a stub key system configuration (w/video).");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: minimalAudioConfiguration,
+ keySystemConfiguration: minimalKeySystemConfiguration,
+ });
+}, "Test that decodingInfo() accepts a stub key system configuration (w/audio).");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'org.w3.clearkey',
+ video: {
+ robustness: '',
+ },
+ },
+ });
+}, "Test that decodingInfo() accepts a key system configuration with video info.");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: minimalAudioConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'org.w3.clearkey',
+ audio : {
+ robustness: '',
+ },
+ },
+ });
+}, "Test that decodingInfo() accepts a key system configuration with audio info.");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: minimalAudioConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'org.w3.clearkey',
+ video: {
+ robustness: '',
+ },
+ },
+ }));
+}, "Test that decodingInfo() rejects if robustness and configuration do not match (1).");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'org.w3.clearkey',
+ audio : {
+ robustness: '',
+ },
+ },
+ }));
+}, "Test that decodingInfo() rejects if robustness and configuration do not match (2).");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'org.w3.clearkey',
+ audio : {
+ robustness: '',
+ },
+ video: {
+ robustness: '',
+ },
+ },
+ }));
+}, "Test that decodingInfo() rejects if robustness and configuration do not match (3).");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: minimalAudioConfiguration,
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'org.w3.clearkey',
+ audio : {
+ robustness: '',
+ },
+ video: {
+ robustness: '',
+ },
+ persistentState: "foobar",
+ },
+ }));
+}, "Test that decodingInfo() rejects if persistentState isn't valid.");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: minimalAudioConfiguration,
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'org.w3.clearkey',
+ audio : {
+ robustness: '',
+ },
+ video: {
+ robustness: '',
+ },
+ distinctiveIdentifier: "foobar",
+ },
+ }));
+}, "Test that decodingInfo() rejects if distinctiveIdentifier isn't valid.");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: minimalAudioConfiguration,
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'org.w3.clearkey',
+ audio : {
+ robustness: '',
+ },
+ video: {
+ robustness: '',
+ },
+ sessionTypes: "foobar",
+ },
+ }));
+}, "Test that decodingInfo() rejects if sessionTypes isn't a sequence.");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ audio: minimalAudioConfiguration,
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: {
+ keySystem: {},
+ initDataType: {},
+ audio : {
+ robustness: '',
+ },
+ video: {
+ robustness: '',
+ },
+ },
+ });
+}, "Test that decodingInfo() does not reject when properties are set to unexpected values.");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ keySystemConfiguration: minimalKeySystemConfiguration,
+ }).then(ability => {
+ assert_equals(typeof ability.supported, "boolean");
+ assert_equals(typeof ability.smooth, "boolean");
+ assert_equals(typeof ability.powerEfficient, "boolean");
+ assert_equals(typeof ability.keySystemAccess, "object");
+ });
+}, "Test that decodingInfo returns a valid MediaCapabilitiesDecodingInfo objects with encrypted media");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: {
+ keySystem: 'foobar',
+ video: {
+ robustness: '',
+ },
+ }
+ }).then(ability => {
+ assert_false(ability.supported);
+ assert_false(ability.smooth);
+ assert_false(ability.powerEfficient);
+ assert_equals(ability.keySystemAccess, null);
+ });
+}, "Test that random key systems are reported as non supported.");
+
+// TODO(mlamouri): this test could be split in two tests for which codec support
+// across browsers is widely compatible: one when all browsers wouldn't support
+// and one where all browsers do support. The current approach is to check that
+// the answer is consistent to the spec.
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ keySystemConfiguration: minimalKeySystemConfiguration,
+ }).then(ability => {
+ if (ability.supported)
+ assert_not_equals(ability.keySystemAccess, null);
+ else
+ assert_equals(ability.keySystemAccess, null);
+ });
+}, "Test that keySystemAccess is only null when not supported if keySystemConfiguration was used.");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.decodingInfo({
+ type: 'file',
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ // Supply bogus config to reliably result in a null keySystemAccess.
+ keySystemConfiguration: bogusKeySystemConfiguration,
+ }).then(ability => {
+ assert_equals(ability.keySystemAccess, null);
+ assert_false(ability.supported);
+ });
+}, "Test that supported=false when keySystemConfiguration is unsupported.");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.decodingInfo({
+ type: 'webrtc',
+ video: minimalVideoConfiguration,
+ keySystemConfiguration: minimalKeySystemConfiguration,
+ }));
+}, "Test that decodingInfo() with type webrtc rejects key system configuration.");
+
+</script>
diff --git a/testing/web-platform/tests/media-capabilities/encodingInfo.any.js b/testing/web-platform/tests/media-capabilities/encodingInfo.any.js
new file mode 100644
index 0000000000..6882b0ae91
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/encodingInfo.any.js
@@ -0,0 +1,310 @@
+// Minimal VideoConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalVideoConfiguration = {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+};
+
+// Minimal WebRTC VideoConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalWebrtcVideoConfiguration = {
+ contentType: 'video/VP9',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+};
+
+// Minimal AudioConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalAudioConfiguration = {
+ contentType: 'audio/webm; codecs="opus"',
+};
+
+// Minimal WebRTC AudioConfiguration that will be allowed per spec. All optional
+// properties are missing.
+var minimalWebrtcAudioConfiguration = {
+ contentType: 'audio/opus',
+};
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo());
+}, "Test that encodingInfo rejects if it doesn't get a configuration");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({}));
+}, "Test that encodingInfo rejects if the MediaConfiguration isn't valid");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }));
+}, "Test that encodingInfo rejects if the MediaConfiguration does not have a type");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ }));
+}, "Test that encodingInfo rejects if the configuration doesn't have an audio or video field");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: -1,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration has a negative framerate");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 0,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration has a framerate set to 0");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: Infinity,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration has a framerate set to Infinity");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'fgeoa',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration contentType doesn't parse");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'audio/fgeoa',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration contentType isn't of type video");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"; foo="bar"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration contentType has more than one parameter");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; foo="bar"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration contentType has one parameter that isn't codecs");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '24000/1001',
+ }
+ }));
+}, "Test that encodingInfo() rejects framerate in the form of x/y");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '24000/0',
+ }
+ }));
+}, "Test that encodingInfo() rejects framerate in the form of x/0");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '0/10001',
+ }
+ }));
+}, "Test that encodingInfo() rejects framerate in the form of 0/y");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '-24000/10001',
+ }
+ }));
+}, "Test that encodingInfo() rejects framerate in the form of -x/y");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '24000/-10001',
+ }
+ }));
+}, "Test that encodingInfo() rejects framerate in the form of x/-y");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '24000/',
+ }
+ }));
+}, "Test that encodingInfo() rejects framerate in the form of x/");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: '1/3x',
+ }
+ }));
+}, "Test that encodingInfo() rejects framerate with trailing unallowed characters");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ audio: { contentType: 'fgeoa' },
+ }));
+}, "Test that encodingInfo rejects if the audio configuration contenType doesn't parse");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ audio: { contentType: 'video/fgeoa' },
+ }));
+}, "Test that encodingInfo rejects if the audio configuration contentType isn't of type audio");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ audio: { contentType: 'audio/webm; codecs="opus"; foo="bar"' },
+ }));
+}, "Test that encodingInfo rejects if the audio configuration contentType has more than one parameters");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ audio: { contentType: 'audio/webm; foo="bar"' },
+ }));
+}, "Test that encodingInfo rejects if the audio configuration contentType has one parameter that isn't codecs");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.encodingInfo({
+ type: 'record',
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }).then(ability => {
+ assert_equals(typeof ability.supported, "boolean");
+ assert_equals(typeof ability.smooth, "boolean");
+ assert_equals(typeof ability.powerEfficient, "boolean");
+ });
+}, "Test that encodingInfo returns a valid MediaCapabilitiesInfo objects for record type");
+
+async_test(t => {
+ var validTypes = [ 'record', 'webrtc' ];
+ var invalidTypes = [ undefined, null, '', 'foobar', 'mse', 'MediaSource',
+ 'file', 'media-source', ];
+
+ var validPromises = [];
+ var invalidCaught = 0;
+
+ validTypes.forEach(type => {
+ validPromises.push(navigator.mediaCapabilities.encodingInfo({
+ type: type,
+ video: type != "webrtc" ? minimalVideoConfiguration : minimalWebrtcVideoConfiguration,
+ audio: type != "webrtc" ? minimalAudioConfiguration : minimalWebrtcAudioConfiguration,
+ }));
+ });
+
+ // validTypes are tested via Promise.all(validPromises) because if one of the
+ // promises fail, Promise.all() will reject. This mechanism can't be used for
+ // invalid types which will be tested individually and increment invalidCaught
+ // when rejected until the amount of rejection matches the expectation.
+ Promise.all(validPromises).then(t.step_func(() => {
+ for (var i = 0; i < invalidTypes.length; ++i) {
+ navigator.mediaCapabilities.encodingInfo({
+ type: invalidTypes[i],
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }).then(t.unreached_func(), t.step_func(e => {
+ assert_equals(e.name, 'TypeError');
+ ++invalidCaught;
+ if (invalidCaught == invalidTypes.length)
+ t.done();
+ }));
+ }
+ }), t.unreached_func('Promise.all should not reject for valid types'));
+}, "Test that encodingInfo rejects if the MediaConfiguration does not have a valid type");
diff --git a/testing/web-platform/tests/media-capabilities/encodingInfo.webrtc.html b/testing/web-platform/tests/media-capabilities/encodingInfo.webrtc.html
new file mode 100644
index 0000000000..414b7944f6
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/encodingInfo.webrtc.html
@@ -0,0 +1,217 @@
+<!DOCTYPE html>
+<title>MediaCapabilities.encodingInfo() for webrtc</title>
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+// Minimal VideoConfiguration that will be allowed per spec. All optional
+// properties are missing.
+const minimalVideoConfiguration = {
+ contentType: 'video/VP9; profile-level="0"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+};
+
+// Minimal AudioConfiguration that will be allowed per spec. All optional
+// properties are missing.
+const minimalAudioConfiguration = {
+ contentType: 'audio/opus',
+};
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ }));
+}, "Test that encodingInfo rejects if the configuration doesn't have an audio or video field");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'video/VP9',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: -1,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration has a negative framerate");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'video/VP9"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 0,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration has a framerate set to 0");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'video/VP9"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: Infinity,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration has a framerate set to Infinity");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'fgeoa',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration contentType doesn't parse");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'audio/fgeoa',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ }));
+}, "Test that encodingInfo rejects if the video configuration contentType isn't of type video");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ audio: { contentType: 'fgeoa' },
+ }));
+}, "Test that encodingInfo rejects if the audio configuration contentType doesn't parse");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ audio: { contentType: 'video/fgeoa' },
+ }));
+}, "Test that encodingInfo rejects if the audio configuration contentType isn't of type audio");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: minimalVideoConfiguration,
+ audio: minimalAudioConfiguration,
+ }).then(ability => {
+ assert_equals(typeof ability.supported, "boolean");
+ assert_equals(typeof ability.smooth, "boolean");
+ assert_equals(typeof ability.powerEfficient, "boolean");
+ });
+}, "Test that encodingInfo returns a valid MediaCapabilitiesInfo objects");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: 'video/webm; codecs="vp09.00.10.08"',
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ },
+ audio: minimalAudioConfiguration,
+ }).then(ability => {
+ assert_false(ability.supported);
+ assert_false(ability.smooth);
+ assert_false(ability.powerEfficient);
+ });
+}, "Test that encodingInfo returns supported, smooth, and powerEfficient set to false for non-webrtc video content type.");
+
+promise_test(t => {
+ return navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: minimalVideoConfiguration,
+ audio: {
+ contentType: 'audio/webm; codecs="opus"',
+ },
+ }).then(ability => {
+ assert_false(ability.supported);
+ assert_false(ability.smooth);
+ assert_false(ability.powerEfficient);
+ });
+}, "Test that encodingInfo returns supported, smooth, and powerEfficient set to false for non-webrtc audio content type.");
+
+const validAudioCodecs = (() => {
+ // Some codecs that are returned by getCapabilities() are not real codecs,
+ // exclude these from the test.
+ const excludeList = [ 'audio/CN', 'audio/telephone-event', 'audio/red' ];
+ const audioCodecs = [];
+ RTCRtpSender.getCapabilities("audio")['codecs'].forEach(codec => {
+ if (excludeList.indexOf(codec.mimeType) < 0 &&
+ audioCodecs.indexOf(codec.mimeType) < 0) {
+ audioCodecs.push(codec.mimeType);
+ }
+ });
+ return audioCodecs;
+})();
+
+validAudioCodecs.forEach(codec => {
+ promise_test(t => {
+ return navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ audio: {
+ contentType: codec
+ }
+ }).then(ability => {
+ assert_true(ability.supported);
+ });
+}, "Test that encodingInfo returns supported true for the codec " + codec + " returned by RTCRtpSender.getCapabilities()")}
+);
+
+const validVideoCodecs = (() => {
+ // Some codecs that are returned by getCapabilities() are not real codecs but
+ // only used for error correction, exclude these from the test.
+ const excludeList = [ 'video/rtx', 'video/red', 'video/ulpfec',
+ 'video/flexfec-03' ];
+ const videoCodecs = [];
+
+ RTCRtpSender.getCapabilities("video")['codecs'].forEach(codec => {
+ if (excludeList.indexOf(codec.mimeType) < 0) {
+ let mimeType = codec.mimeType;
+ if ('sdpFmtpLine' in codec) {
+ mimeType += "; " + codec.sdpFmtpLine;
+ }
+ if (videoCodecs.indexOf(mimeType) < 0) { // De-duplicate by value; 'in' would only test array indices.
+ videoCodecs.push(mimeType);
+ }
+ }
+ });
+ return videoCodecs;
+})();
+
+validVideoCodecs.forEach(codec => {
+ promise_test(t => {
+ return navigator.mediaCapabilities.encodingInfo({
+ type: 'webrtc',
+ video: {
+ contentType: codec,
+ width: 800,
+ height: 600,
+ bitrate: 3000,
+ framerate: 24,
+ }
+ }).then(ability => {
+ assert_true(ability.supported);
+ });
+}, "Test that encodingInfo returns supported true for the codec " + codec + " returned by RTCRtpSender.getCapabilities()")}
+);
+
+</script>
diff --git a/testing/web-platform/tests/media-capabilities/idlharness.any.js b/testing/web-platform/tests/media-capabilities/idlharness.any.js
new file mode 100644
index 0000000000..6da5c7d284
--- /dev/null
+++ b/testing/web-platform/tests/media-capabilities/idlharness.any.js
@@ -0,0 +1,25 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+
+// https://wicg.github.io/media-capabilities/
+
+'use strict';
+
+promise_test(async () => {
+ idl_test(
+ ['media-capabilities'],
+ ['html', 'cssom-view'],
+ idl_array => {
+ if (self.GLOBAL.isWorker()) {
+ idl_array.add_objects({ WorkerNavigator: ['navigator'] });
+ } else {
+ idl_array.add_objects({ Navigator: ['navigator'] });
+ }
+ idl_array.add_objects({
+ MediaCapabilities: ['navigator.mediaCapabilities'],
+ Screen: ['screen'],
+ ScreenLuminance: ['screen.luminance'],
+ });
+ }
+ );
+});
diff --git a/testing/web-platform/tests/media-playback-quality/META.yml b/testing/web-platform/tests/media-playback-quality/META.yml
new file mode 100644
index 0000000000..51b1b4e07e
--- /dev/null
+++ b/testing/web-platform/tests/media-playback-quality/META.yml
@@ -0,0 +1,3 @@
+spec: https://w3c.github.io/media-playback-quality/
+suggested_reviewers:
+ - mounirlamouri
diff --git a/testing/web-platform/tests/media-playback-quality/idlharness.window.js b/testing/web-platform/tests/media-playback-quality/idlharness.window.js
new file mode 100644
index 0000000000..2444e30c70
--- /dev/null
+++ b/testing/web-platform/tests/media-playback-quality/idlharness.window.js
@@ -0,0 +1,20 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+
+// https://w3c.github.io/media-playback-quality/
+
+'use strict';
+
+idl_test(
+ ['media-playback-quality'],
+ ['html', 'dom'],
+ idl_array => {
+ idl_array.add_objects({
+ HTMLVideoElement: ['video'],
+ VideoPlaybackQuality: ['videoPlaybackQuality']
+ });
+
+ self.video = document.createElement('video');
+ self.videoPlaybackQuality = video.getVideoPlaybackQuality();
+ }
+);
diff --git a/testing/web-platform/tests/media-source/META.yml b/testing/web-platform/tests/media-source/META.yml
new file mode 100644
index 0000000000..d1252f5af1
--- /dev/null
+++ b/testing/web-platform/tests/media-source/META.yml
@@ -0,0 +1,3 @@
+spec: https://w3c.github.io/media-source/
+suggested_reviewers:
+ - wolenetz
diff --git a/testing/web-platform/tests/media-source/SourceBuffer-abort-readyState.html b/testing/web-platform/tests/media-source/SourceBuffer-abort-readyState.html
new file mode 100644
index 0000000000..5942379d08
--- /dev/null
+++ b/testing/web-platform/tests/media-source/SourceBuffer-abort-readyState.html
@@ -0,0 +1,72 @@
+<!doctype html>
+<html>
+<head>
+ <meta charset='utf-8'>
+ <title>SourceBuffer#abort() when readyState attribute is not in the "open" state</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+var contents = {'/media/white.webm': 'video/webm; codecs="vorbis,vp8"',
+ '/media/white.mp4' : 'video/mp4'};
+
+// Check that the browser supports the MIME type used in this test.
+function isTypeSupported(mime) {
+ if(!MediaSource.isTypeSupported(mime)) {
+ this.step(function() {
+ assert_unreached("Browser doesn't support the MIME used in this test: " + mime);
+ });
+ this.done();
+ return false;
+ }
+ return true;
+}
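+// Fetch |url| as an ArrayBuffer and pass the response body to |processBody| as a Uint8Array.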
+function GET(url, processBody) {
+ var xhr = new XMLHttpRequest();
+ xhr.open('GET', url, true);
+ xhr.responseType = 'arraybuffer';
+ xhr.send();
+ xhr.onload = function(e) {
+ if (xhr.status != 200) {
+ alert("Unexpected status code " + xhr.status + " for " + url);
+ return false;
+ }
+ processBody(new Uint8Array(xhr.response));
+ };
+}
+function mediaTest(file, mime) {
+ async_test(function(t) {
+ if(!isTypeSupported.bind(t)(mime)) {
+ return;
+ }
+ GET(file, function(data) {
+ var mediaSource = new MediaSource();
+ var sourceBuffer = null;
+ mediaSource.addEventListener('sourceopen', function(e) {
+ sourceBuffer = mediaSource.addSourceBuffer(mime);
+ mediaSource.endOfStream();
+ assert_equals(mediaSource.readyState, 'ended',
+ 'mediaSource.readyState is "ended" after endOfStream()');
+ });
+ mediaSource.addEventListener('sourceended', t.step_func_done(function(e) {
+ assert_throws_dom('InvalidStateError', function() {
+ sourceBuffer.abort();
+ });
+ }));
+ var video = document.createElement('video');
+ video.src = window.URL.createObjectURL(mediaSource);
+ });
+ }, 'SourceBuffer#abort() (' + mime + ') : If the readyState attribute ' +
+ 'of the parent media source is not in the "open" state then throw ' +
+ 'an INVALID_STATE_ERR exception and abort these steps.');
+}
+for(var file in contents) {
+ mediaTest(file, contents[file]);
+}
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/SourceBuffer-abort-removed.html b/testing/web-platform/tests/media-source/SourceBuffer-abort-removed.html
new file mode 100644
index 0000000000..4782412ccd
--- /dev/null
+++ b/testing/web-platform/tests/media-source/SourceBuffer-abort-removed.html
@@ -0,0 +1,52 @@
+<!doctype html>
+<html>
+<head>
+ <meta charset='utf-8'>
+ <title>SourceBuffer#abort() for already removed buffer from parent media source</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+var mimes = ['video/webm; codecs="vorbis,vp8"', 'video/mp4'];
+
+// Check that the browser supports the MIME type used in this test.
+function isTypeSupported(mime) {
+ if(!MediaSource.isTypeSupported(mime)) {
+ this.step(function() {
+ assert_unreached("Browser doesn't support the MIME used in this test: " + mime);
+ });
+ this.done();
+ return false;
+ }
+ return true;
+}
+function mediaTest(mime) {
+ async_test(function(t) {
+ if(!isTypeSupported.bind(t)(mime)) {
+ return;
+ }
+ var mediaSource = new MediaSource();
+ mediaSource.addEventListener('sourceopen', t.step_func_done(function(e) {
+ var sourceBuffer = mediaSource.addSourceBuffer(mime);
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ assert_throws_dom('InvalidStateError',
+ function() {
+ sourceBuffer.abort();
+ },
+ 'SourceBuffer#abort() after removing the SourceBuffer object');
+ }), false);
+ var video = document.createElement('video');
+ video.src = window.URL.createObjectURL(mediaSource);
+ }, 'SourceBuffer#abort (' + mime + ') : ' +
+ 'if this object has been removed from the sourceBuffers attribute of the parent media source, ' +
+ 'then throw an INVALID_STATE_ERR exception and abort these steps.');
+}
+mimes.forEach(function(mime) {
+ mediaTest(mime);
+});
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/SourceBuffer-abort-updating.html b/testing/web-platform/tests/media-source/SourceBuffer-abort-updating.html
new file mode 100644
index 0000000000..1132d14663
--- /dev/null
+++ b/testing/web-platform/tests/media-source/SourceBuffer-abort-updating.html
@@ -0,0 +1,92 @@
+<!doctype html>
+<html>
+<head>
+ <meta charset='utf-8'>
+ <title>Check SourceBuffer#abort() when the updating attribute is true</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+var contents = {'/media/white.webm': 'video/webm; codecs="vorbis,vp8"',
+ '/media/white.mp4' : 'video/mp4'};
+
+function GET(url, processBody) {
+ var xhr = new XMLHttpRequest();
+ xhr.open('GET', url, true);
+ xhr.responseType = 'arraybuffer';
+ xhr.send();
+ xhr.onload = function(e) {
+ if (xhr.status != 200) {
+ alert("Unexpected status code " + xhr.status + " for " + url);
+ return false;
+ }
+ processBody(new Uint8Array(xhr.response));
+ };
+}
+// Check that the browser supports the MIME type used in this test.
+function isTypeSupported(mime) {
+ if(!MediaSource.isTypeSupported(mime)) {
+ this.step(function() {
+ assert_unreached("Browser doesn't support the MIME used in this test: " + mime);
+ });
+ this.done();
+ return false;
+ }
+ return true;
+}
+function mediaTest(file, mime) {
+ async_test(function(t) {
+ if(!isTypeSupported.bind(t)(mime)) {
+ return;
+ }
+ GET(file, function(data) {
+ var mediaSource = new MediaSource();
+ var num_updateend = 0;
+ var events = [];
+ mediaSource.addEventListener('sourceopen', t.step_func(function(e) {
+ var sourceBuffer = mediaSource.addSourceBuffer(mime);
+ assert_equals(sourceBuffer.updating, false);
+ sourceBuffer.addEventListener('updatestart', t.step_func(function(e) {
+ events.push('updatestart');
+ // Abort while sourceBuffer#updating is true.
+ sourceBuffer.abort();
+
+ assert_equals(sourceBuffer.updating, false,
+ 'Check updating value after calling abort.');
+ assert_equals(sourceBuffer.appendWindowStart, 0);
+ assert_equals(sourceBuffer.appendWindowEnd, Number.POSITIVE_INFINITY);
+ }));
+ sourceBuffer.addEventListener('update', t.step_func(function(e) {
+ assert_unreached("Can't touch this");
+ }));
+ sourceBuffer.addEventListener('updateend', function(e) {
+ events.push('updateend');
+ mediaSource.endOfStream();
+ });
+ sourceBuffer.addEventListener('abort', function(e) {
+ events.push('abort');
+ });
+ sourceBuffer.addEventListener('error', t.step_func(function(e) {
+ assert_unreached("Can't touch this");
+ }));
+ sourceBuffer.appendBuffer(data);
+ }));
+ mediaSource.addEventListener('sourceended', t.step_func_done(function(e) {
+ assert_array_equals(events,
+ ['updatestart', 'abort', 'updateend'],
+ 'Check the sequence of fired events.');
+ }));
+ var video = document.createElement('video');
+ video.src = window.URL.createObjectURL(mediaSource);
+ });
+ }, 'SourceBuffer#abort() (' + mime + ') : Check the algorithm when the updating attribute is true.');
+}
+for(var file in contents) {
+ mediaTest(file, contents[file]);
+}
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/SourceBuffer-abort.html b/testing/web-platform/tests/media-source/SourceBuffer-abort.html
new file mode 100644
index 0000000000..7d7c9ff1de
--- /dev/null
+++ b/testing/web-platform/tests/media-source/SourceBuffer-abort.html
@@ -0,0 +1,34 @@
+<!doctype html>
+<html>
+<head>
+ <meta charset='utf-8'>
+ <title>Check the values of appendWindowStart and appendWindowEnd after abort()</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+var mimes = ['video/webm; codecs="vorbis,vp8"', 'video/mp4'];
+
+mimes.forEach(function(mime) {
+ async_test(function() {
+ assert_true(MediaSource.isTypeSupported(mime),
+ "Browser doesn't support the MIME used in this test: " + mime);
+
+ var mediaSource = new MediaSource();
+ mediaSource.addEventListener('sourceopen', this.step_func_done(function(e) {
+ var sourceBuffer = mediaSource.addSourceBuffer(mime);
+ sourceBuffer.abort();
+ assert_equals(sourceBuffer.appendWindowStart, 0);
+ assert_equals(sourceBuffer.appendWindowEnd, Number.POSITIVE_INFINITY);
+ }));
+
+ var video = document.createElement('video');
+ video.src = window.URL.createObjectURL(mediaSource);
+ }, 'SourceBuffer#abort() (' + mime + '): Check the values of appendWindowStart and appendWindowEnd.');
+});
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/URL-createObjectURL-null.html b/testing/web-platform/tests/media-source/URL-createObjectURL-null.html
new file mode 100644
index 0000000000..f2f973a776
--- /dev/null
+++ b/testing/web-platform/tests/media-source/URL-createObjectURL-null.html
@@ -0,0 +1,19 @@
+<!doctype html>
+<html>
+<head>
+ <meta charset='utf-8'>
+ <title>URL.createObjectURL(null)</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+test(function() {
+ assert_throws_js(TypeError, function() {
+ window.URL.createObjectURL(null);
+ });
+}, "URL.createObjectURL(null)");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/URL-createObjectURL-revoke.html b/testing/web-platform/tests/media-source/URL-createObjectURL-revoke.html
new file mode 100644
index 0000000000..c5e18d4fd5
--- /dev/null
+++ b/testing/web-platform/tests/media-source/URL-createObjectURL-revoke.html
@@ -0,0 +1,59 @@
+<!doctype html>
+<html>
+<head>
+ <meta charset='utf-8'>
+ <title>Revoking a created URL with URL.revokeObjectURL(url)</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+async_test(function(t) {
+ var mediaSource = new MediaSource();
+ var url = window.URL.createObjectURL(mediaSource);
+ window.URL.revokeObjectURL(url);
+ mediaSource.addEventListener('sourceopen',
+ t.unreached_func("url should not reference MediaSource."));
+ var video = document.createElement('video');
+ video.src = url;
+ video.addEventListener('error', t.step_func_done(function(e) {
+ assert_equals(e.target.error.code,
+ MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED,
+ 'Expected error code');
+ assert_equals(mediaSource.readyState, 'closed');
+ }));
+}, "Check revoking behavior of URL.revokeObjectURL(url).");
+async_test(function(t) {
+ var mediaSource = new MediaSource();
+ var url = window.URL.createObjectURL(mediaSource);
+ var video = document.createElement('video');
+ var unexpectedErrorHandler = t.unreached_func("Unexpected error.")
+ video.addEventListener('error', unexpectedErrorHandler);
+ video.src = url;
+ window.URL.revokeObjectURL(url);
+ mediaSource.addEventListener('sourceopen', t.step_func_done(function(e) {
+ assert_equals(mediaSource.readyState, 'open');
+ mediaSource.endOfStream();
+ video.removeEventListener('error', unexpectedErrorHandler);
+ }));
+}, "Check referenced MediaSource can open after URL.revokeObjectURL(url).");
+async_test(function(t) {
+ var mediaSource = new MediaSource();
+ var url = window.URL.createObjectURL(mediaSource);
+ setTimeout(function() {
+ mediaSource.addEventListener('sourceopen',
+ t.unreached_func("url should not reference MediaSource."));
+ var video = document.createElement('video');
+ video.src = url;
+ video.addEventListener('error', t.step_func_done(function(e) {
+ assert_equals(e.target.error.code,
+ MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED,
+ 'Expected error code');
+ assert_equals(mediaSource.readyState, 'closed');
+ }));
+ }, 0);
+}, "Check auto-revoking behavior with URL.createObjectURL(MediaSource).");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/URL-createObjectURL.html b/testing/web-platform/tests/media-source/URL-createObjectURL.html
new file mode 100644
index 0000000000..da82806349
--- /dev/null
+++ b/testing/web-platform/tests/media-source/URL-createObjectURL.html
@@ -0,0 +1,20 @@
+<!doctype html>
+<html>
+<head>
+ <meta charset='utf-8'>
+ <title>URL.createObjectURL(mediaSource)</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+test(function() {
+ var mediaSource = new MediaSource();
+ var url = window.URL.createObjectURL(mediaSource);
+ assert_true(url != null);
+ assert_true(url.match(/^blob:.+/) != null);
+}, "URL.createObjectURL(mediaSource) should return a unique Blob URI.");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-message-util.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-message-util.js
new file mode 100644
index 0000000000..c62eb8e3f7
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-message-util.js
@@ -0,0 +1,16 @@
+// This script provides an object with common message subjects to assist main
+// and worker thread communication.
+
+const messageSubject = {
+ ERROR: "error", // info field may contain more detail
+ OBJECT_URL: "object url", // info field contains object URL
+ HANDLE: "handle", // info field contains the MediaSourceHandle
+ STARTED_BUFFERING: "started buffering",
+ FINISHED_BUFFERING: "finished buffering",
+ VERIFY_DURATION: "verify duration", // info field contains expected duration
+ AWAIT_DURATION: "await duration", // wait for element duration to match the expected duration in the info field
+ VERIFY_HAVE_NOTHING: "verify have nothing readyState",
+ VERIFY_AT_LEAST_HAVE_METADATA: "verify readyState is at least HAVE_METADATA",
+ ACK_VERIFIED: "verified", // info field contains the message values that requested the verification
+ WORKER_DONE: "worker done", // this lets worker signal main to successfully end the test
+};
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-detach-element.html b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-detach-element.html
new file mode 100644
index 0000000000..0f74d95372
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-detach-element.html
@@ -0,0 +1,75 @@
+<!DOCTYPE html>
+<html>
+<title>MediaSource-in-Worker buffering test case with media element detachment at various places</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-message-util.js"></script>
+<body>
+<script>
+
+const AFTER_SETTING_SRCOBJECT = "after setting srcObject";
+const AFTER_STARTED_BUFFERING = "after receiving Started Buffering message from worker";
+const AFTER_FINISHED_BUFFERING = "after receiving Finished Buffering message from worker";
+
+[ AFTER_SETTING_SRCOBJECT, AFTER_STARTED_BUFFERING, AFTER_FINISHED_BUFFERING ].forEach(when => {
+ for (let timeouts = 0; timeouts < 5; ++timeouts) {
+ async_test(test => { startWorkerAndDetachElement(test, when, timeouts); },
+ "Test element detachment from worker MediaSource after at least " + timeouts +
+ " main thread setTimeouts, starting counting " + when);
+ }
+});
+
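+// Chain |timeouts_remaining| zero-delay step_timeouts on the main thread, then
+// detach by clearing the element's srcObject and complete |test| shortly afterwards.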
+function detachElementAfterMultipleSetTimeouts(test, element, timeouts_remaining) {
+ if (timeouts_remaining <= 0) {
+ // While not the best way to detach, this triggers interoperable logic that
+ // includes detachment.
+ element.srcObject = null;
+ test.step_timeout(() => { test.done(); }, 10);
+ } else {
+ test.step_timeout(() => {
+ detachElementAfterMultipleSetTimeouts(test, element, --timeouts_remaining);
+ }, 0);
+ }
+}
+
+function startWorkerAndDetachElement(test, when_to_start_timeouts, timeouts_to_await) {
+ // Fail fast if MSE-in-Workers is not supported.
+ assert_true(MediaSource.hasOwnProperty("canConstructInDedicatedWorker"), "MediaSource hasOwnProperty 'canConstructInDedicatedWorker'");
+ assert_true(MediaSource.canConstructInDedicatedWorker, "MediaSource.canConstructInDedicatedWorker");
+
+ const worker = new Worker("mediasource-worker-detach-element.js");
+ worker.onerror = test.unreached_func("worker error");
+
+ const video = document.createElement("video");
+ document.body.appendChild(video);
+
+ worker.onmessage = test.step_func(e => {
+ let subject = e.data.subject;
+ assert_true(subject != undefined, "message must have a subject field");
+ switch (subject) {
+ case messageSubject.ERROR:
+ assert_unreached("Worker error: " + e.data.info);
+ break;
+ case messageSubject.HANDLE:
+ const handle = e.data.info;
+ video.srcObject = handle;
+ if (when_to_start_timeouts == AFTER_SETTING_SRCOBJECT) {
+ detachElementAfterMultipleSetTimeouts(test, video, timeouts_to_await);
+ }
+ break;
+ case messageSubject.STARTED_BUFFERING:
+ if (when_to_start_timeouts == AFTER_STARTED_BUFFERING)
+ detachElementAfterMultipleSetTimeouts(test, video, timeouts_to_await);
+ break;
+ case messageSubject.FINISHED_BUFFERING:
+ if (when_to_start_timeouts == AFTER_FINISHED_BUFFERING)
+ detachElementAfterMultipleSetTimeouts(test, video, timeouts_to_await);
+ break;
+ default:
+ assert_unreached("Unrecognized message subject: " + subject);
+ }
+ });
+}
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-detach-element.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-detach-element.js
new file mode 100644
index 0000000000..54b1d815f2
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-detach-element.js
@@ -0,0 +1,79 @@
+// This is similar to mediasource-worker-play.js, except that the buffering is
+// longer and done in small chunks to increase the chance that the main thread
+// detaches the element while buffering work is still in progress. Extra
+// messages are also posted so that the main thread knows whether buffering has
+// already started or has already finished.
+importScripts("mediasource-worker-util.js");
+
+onmessage = function(evt) {
+ postMessage({ subject: messageSubject.ERROR, info: "No message expected by Worker" });
+};
+
+let util = new MediaSourceWorkerUtil();
+
+let sentStartedBufferingMessage = false;
+
+util.mediaSource.addEventListener("sourceopen", () => {
+ let sourceBuffer;
+ try {
+ sourceBuffer = util.mediaSource.addSourceBuffer(util.mediaMetadata.type);
+ } catch(e) {
+ // Detachment may have already begun, so allow exception here.
+ // TODO(https://crbug.com/878133): Consider a distinct readyState for the case
+ // where exception occurs due to "Worker MediaSource attachment is closing".
+ // That would assist API users and narrow the exception handling here.
+ return;
+ }
+
+ sourceBuffer.onerror = (err) => {
+ postMessage({ subject: messageSubject.ERROR, info: err });
+ };
+ util.mediaLoadPromise.then(mediaData => bufferInto(sourceBuffer, mediaData, 100, 0),
+ err => { postMessage({ subject: messageSubject.ERROR, info: err }) } );
+}, { once : true });
+
+let handle = util.mediaSource.handle;
+
+postMessage({ subject: messageSubject.HANDLE, info: handle }, { transfer: [handle] } );
+
+// Append increasingly large pieces at a time, starting/continuing at |position|.
+// This buffers the test media without timing out, while still performing enough
+// append operations to exercise detachment concurrently with appends.
+function bufferInto(sourceBuffer, mediaData, appendSize, position) {
+ if (position >= mediaData.byteLength) {
+ postMessage({ subject: messageSubject.FINISHED_BUFFERING });
+ try {
+ util.mediaSource.endOfStream();
+ } catch(e) {
+ // Detachment may have already begun, so allow exception here.
+ // TODO(https://crbug.com/878133): Consider a distinct readyState for the case
+ // where exception occurs due to "Worker MediaSource attachment is closing".
+ // That would assist API users and narrow the exception handling here.
+ // FALL-THROUGH - return.
+ }
+ return;
+ }
+
+ var nextPosition = position + appendSize;
+ const pieceToAppend = mediaData.slice(position, nextPosition);
+ position = nextPosition;
+ appendSize += 100;
+
+ sourceBuffer.addEventListener("updateend", () => {
+ if (!sentStartedBufferingMessage) {
+ postMessage({ subject: messageSubject.STARTED_BUFFERING});
+ sentStartedBufferingMessage = true;
+ }
+ bufferInto(sourceBuffer, mediaData, appendSize, position);
+ }, { once : true });
+
+ try {
+ sourceBuffer.appendBuffer(pieceToAppend);
+ } catch(e) {
+ // Detachment may have already begun, so allow exception here.
+ // TODO(https://crbug.com/878133): Consider a distinct readyState for the case
+ // where exception occurs due to "Worker MediaSource attachment is closing".
+ // That would assist API users and narrow the exception handling here.
+ // FALL-THROUGH - return.
+ }
+}
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-duration.html b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-duration.html
new file mode 100644
index 0000000000..c195775beb
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-duration.html
@@ -0,0 +1,86 @@
+<!DOCTYPE html>
+<html>
+<title>Test MediaSource-in-Worker duration updates before and after HAVE_METADATA</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-message-util.js"></script>
+<body>
+<script>
+
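+// Wait, via 'durationchange' events, until |video|.duration matches |expectedDuration|
+// (treating NaN as matching NaN), then ACK |requestingMessage| back to |worker|.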
+function awaitDuration(t, video, worker, requestingMessage, expectedDuration) {
+ let durationAwaiter = t.step_func(() => {
+ if ((!Number.isNaN(expectedDuration) && video.duration === expectedDuration) ||
+ (Number.isNaN(expectedDuration) && Number.isNaN(video.duration))) {
+ worker.postMessage({ subject: messageSubject.ACK_VERIFIED, info: requestingMessage });
+ return;
+ }
+
+ // Otherwise, wait for one or more 'durationchange' events to see if video
+ // eventually has the expectedDuration.
+ video.addEventListener('durationchange', durationAwaiter, { once: true });
+ });
+
+ durationAwaiter();
+}
+
+async_test(t => {
+ // Fail fast if MSE-in-Workers is not supported.
+ assert_true(MediaSource.hasOwnProperty("canConstructInDedicatedWorker"), "MediaSource hasOwnProperty 'canConstructInDedicatedWorker'");
+ assert_true(MediaSource.canConstructInDedicatedWorker, "MediaSource.canConstructInDedicatedWorker");
+
+ const video = document.createElement("video");
+ document.body.appendChild(video);
+ video.onerror = t.unreached_func("video element error");
+ video.onended = t.unreached_func("video element ended");
+ assert_equals(video.duration, NaN, "initial video duration before attachment should be NaN");
+ assert_equals(video.readyState, HTMLMediaElement.HAVE_NOTHING, "initial video readyState before attachment should be HAVE_NOTHING");
+
+ let worker = new Worker("mediasource-worker-duration.js");
+ worker.onerror = t.step_func(e => {
+ assert_unreached("worker error: [" + e.filename + ":" + e.lineno + ":" + e.colno + ":" + e.error + ":" + e.message + "]");
+ });
+ worker.onmessage = t.step_func(e => {
+ let subject = e.data.subject;
+ assert_true(subject !== undefined, "message must have a subject field");
+ switch (subject) {
+ case messageSubject.ERROR:
+ assert_unreached("Worker error: " + e.data.info);
+ break;
+ case messageSubject.HANDLE:
+ const handle = e.data.info;
+ assert_equals(video.duration, NaN, "initial video duration before attachment should still be NaN");
+ assert_equals(video.readyState, HTMLMediaElement.HAVE_NOTHING,
+ "initial video readyState before attachment should still be HAVE_NOTHING");
+ video.srcObject = handle;
+ break;
+ case messageSubject.VERIFY_DURATION:
+ assert_equals(video.duration, e.data.info, "duration should match expectation");
+ worker.postMessage({ subject: messageSubject.ACK_VERIFIED, info: e.data });
+ break;
+ case messageSubject.AWAIT_DURATION:
+ awaitDuration(t, video, worker, e.data, e.data.info);
+ break;
+ case messageSubject.VERIFY_HAVE_NOTHING:
+ assert_equals(video.readyState, HTMLMediaElement.HAVE_NOTHING, "readyState should match expectation");
+ worker.postMessage({ subject: messageSubject.ACK_VERIFIED, info: e.data });
+ break;
+ case messageSubject.VERIFY_AT_LEAST_HAVE_METADATA:
+ assert_greater_than_equal(video.readyState, HTMLMediaElement.HAVE_METADATA, "readyState should match expectation");
+ worker.postMessage({ subject: messageSubject.ACK_VERIFIED, info: e.data });
+ break;
+ case messageSubject.WORKER_DONE:
+ // This test is a worker-driven set of verifications, and it will send
+ // this message when it is complete. See comment in the worker script
+ // that describes the phases of this test case.
+ assert_not_equals(video.srcObject, null, "test should at least have set srcObject.");
+ t.done();
+ break;
+ default:
+ assert_unreached("Unexpected message subject: " + subject);
+ }
+ });
+}, "Test worker MediaSource duration updates before and after HAVE_METADATA");
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-duration.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-duration.js
new file mode 100644
index 0000000000..2a2c7bac0b
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-duration.js
@@ -0,0 +1,290 @@
+importScripts("mediasource-worker-util.js");
+
+// Note: we do not use testharness.js utilities within the worker context,
+// because testharness.js also communicates with the main HTML document's
+// harness via postMessage, which would confuse the test case's message parsing there.
+
+let util = new MediaSourceWorkerUtil();
+let sourceBuffer;
+
+// Phases of this test case, in sequence:
+const testPhase = {
+ // Main thread verifies initial unattached HTMLMediaElement duration is NaN
+ // and readyState is HAVE_NOTHING, then starts this worker.
+ // This worker creates a MediaSource, verifies its initial duration
+ // is NaN, obtains the MediaSource's handle, and transfers that handle to the
+ // main thread.
+ kInitial: "Initial",
+
+ // Main thread receives MediaSourceHandle, re-verifies that the media element
+ // duration is still NaN and readyState is still HAVE_NOTHING, and then sets
+ // the handle as the srcObject of the media element, eventually causing worker
+ // mediaSource 'onsourceopen' event dispatch.
+ kAttaching: "Awaiting sourceopen event that signals attachment is setup",
+
+ kRequestNaNDurationCheck:
+ "Sending request to main thread to verify expected duration of the freshly setup attachment",
+ kConfirmNaNDurationResult:
+ "Checking that main thread correctly ACK'ed the freshly setup attachment's duration verification request",
+
+ kRequestHaveNothingReadyStateCheck:
+ "Sending request to main thread to verify expected readyState of HAVE_NOTHING of the freshly setup attachment",
+ kConfirmHaveNothingReadyStateResult:
+ "Checking that main thread correctly ACK'ed the freshly setup attachment's readyState HAVE_NOTHING verification request",
+
+ kRequestSetDurationCheck:
+ "Sending request to main thread to verify explicitly set duration before any media data has been appended",
+ kConfirmSetDurationResult:
+ "Checking that main thread correctly ACK'ed the duration verification request of explicitly set duration before any media data has been appended",
+
+ kRequestHaveNothingReadyStateRecheck:
+ "Sending request to main thread to recheck that the readyState is still HAVE_NOTHING",
+ kConfirmHaveNothingReadyStateRecheckResult:
+ "Checking that main thread correctly ACK'ed the request to recheck readyState of HAVE_NOTHING",
+
+ kRequestAwaitNewDurationCheck:
+ "Buffering media and then sending request to main thread to await duration reaching the expected value due to buffering",
+ kConfirmAwaitNewDurationResult:
+ "Checking that main thread correctly ACK'ed the request to await duration reaching the expected value due to buffering",
+
+ kRequestAtLeastHaveMetadataReadyStateCheck:
+ "Sending request to main thread to verify expected readyState of at least HAVE_METADATA due to buffering",
+ kConfirmAtLeastHaveMetadataReadyStateResult:
+ "Checking that main thread correctly ACK'ed the request to verify expected readyState of at least HAVE_METADATA due to buffering",
+
+};
+
+let phase = testPhase.kInitial;
+
+// Setup handler for receipt of attachment completion.
+util.mediaSource.addEventListener("sourceopen", () => {
+ assert(phase === testPhase.kAttaching, "Unexpected sourceopen received by Worker mediaSource.");
+ phase = testPhase.kRequestNaNDurationCheck;
+ processPhase();
+}, { once : true });
+
+// Setup handler for receipt of acknowledgement of successful verifications from
+// main thread. |ackVerificationData| contains the round-tripped verification
+// request that the main thread just sent, and is used in processPhase to ensure
+// the ACK for this phase matched the request for verification.
+let ackVerificationData;
+onmessage = e => {
+ if (e.data === undefined || e.data.subject !== messageSubject.ACK_VERIFIED || e.data.info === undefined) {
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: "Invalid message received by Worker"
+ });
+ return;
+ }
+
+ ackVerificationData = e.data.info;
+ processPhase(/* isResponseToAck */ true);
+};
+
+processPhase();
+
+
+// Returns true if checks succeed, false otherwise.
+function checkAckVerificationData(expectedRequest) {
+
+ // Compares only subject and info fields. Uses logic similar to testharness.js's
+ // same_value(x,y) to correctly handle NaN, but doesn't distinguish +0 from -0.
+ function messageValuesEqual(m1, m2) {
+ if (m1.subject !== m1.subject) {
+ // NaN case
+ if (m2.subject === m2.subject)
+ return false;
+ } else if (m1.subject !== m2.subject) {
+ return false;
+ }
+
+ if (m1.info !== m1.info) {
+ // NaN case
+ return (m2.info !== m2.info);
+ }
+
+ return m1.info === m2.info;
+ }
+
+ if (messageValuesEqual(expectedRequest, ackVerificationData)) {
+ ackVerificationData = undefined;
+ return true;
+ }
+
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: "ACK_VERIFIED message from main thread was for a mismatching request for this phase. phase=[" + phase +
+ "], expected request that would produce ACK in this phase=[" + JSON.stringify(expectedRequest) +
+ "], actual request reported with the ACK=[" + JSON.stringify(ackVerificationData) + "]"
+ });
+
+ ackVerificationData = undefined;
+ return false;
+}
+
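+// Append the whole test media, sanity check the resulting duration, and then ask
+// the main thread to await that duration on the attached media element.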
+function bufferMediaAndSendDurationVerificationRequest() {
+ sourceBuffer = util.mediaSource.addSourceBuffer(util.mediaMetadata.type);
+ sourceBuffer.onerror = (err) => {
+ postMessage({ subject: messageSubject.ERROR, info: err });
+ };
+ sourceBuffer.onupdateend = () => {
+ // Sanity check the duration. This isn't strictly necessary for this
+ // buffering step, but it adds test coverage.
+ var duration = util.mediaSource.duration;
+ if (isNaN(duration) || duration <= 0.0) {
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: "mediaSource.duration " + duration + " is not within expected range (0,1)"
+ });
+ return;
+ }
+
+ // Await the main thread media element duration matching the worker
+ // mediaSource duration.
+ postMessage(getAwaitCurrentDurationRequest());
+ };
+
+ util.mediaLoadPromise.then(mediaData => { sourceBuffer.appendBuffer(mediaData); },
+ err => { postMessage({ subject: messageSubject.ERROR, info: err }) });
+}
+
+
+function getAwaitCurrentDurationRequest() {
+ // Sanity check that we have a numeric duration value now.
+ const dur = util.mediaSource.duration;
+ assert(!Number.isNaN(dur), "Unexpected NaN duration in worker");
+ return { subject: messageSubject.AWAIT_DURATION, info: dur };
+}
+
+function assert(conditionBool, description) {
+ if (conditionBool !== true) {
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: "Current test phase [" + phase + "] failed worker assertion. " + description
+ });
+ }
+}
+
+function processPhase(isResponseToAck = false) {
+ assert(!isResponseToAck || (phase !== testPhase.kInitial && phase !== testPhase.kAttaching),
+ "Phase does not expect verification ack receipt from main thread");
+
+ // Some static request messages useful in transmission and ACK verification.
+ const nanDurationCheckRequest = { subject: messageSubject.VERIFY_DURATION, info: NaN };
+ const haveNothingReadyStateCheckRequest = { subject: messageSubject.VERIFY_HAVE_NOTHING };
+ const setDurationCheckRequest = { subject: messageSubject.AWAIT_DURATION, info: 0.1 };
+ const atLeastHaveMetadataReadyStateCheckRequest = { subject: messageSubject.VERIFY_AT_LEAST_HAVE_METADATA };
+
+ switch (phase) {
+
+ case testPhase.kInitial:
+ assert(Number.isNaN(util.mediaSource.duration), "Initial unattached MediaSource duration must be NaN, but instead is " + util.mediaSource.duration);
+ phase = testPhase.kAttaching;
+ let handle = util.mediaSource.handle;
+ postMessage({ subject: messageSubject.HANDLE, info: handle }, { transfer: [handle] } );
+ break;
+
+ case testPhase.kAttaching:
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: "kAttaching phase is handled by main thread and by worker onsourceopen, not this switch case."
+ });
+ break;
+
+ case testPhase.kRequestNaNDurationCheck:
+ assert(!isResponseToAck);
+ postMessage(nanDurationCheckRequest);
+ phase = testPhase.kConfirmNaNDurationResult;
+ break;
+
+ case testPhase.kConfirmNaNDurationResult:
+ assert(isResponseToAck);
+ if (checkAckVerificationData(nanDurationCheckRequest)) {
+ phase = testPhase.kRequestHaveNothingReadyStateCheck;
+ processPhase();
+ }
+ break;
+
+ case testPhase.kRequestHaveNothingReadyStateCheck:
+ assert(!isResponseToAck);
+ postMessage(haveNothingReadyStateCheckRequest);
+ phase = testPhase.kConfirmHaveNothingReadyStateResult;
+ break;
+
+ case testPhase.kConfirmHaveNothingReadyStateResult:
+ assert(isResponseToAck);
+ if (checkAckVerificationData(haveNothingReadyStateCheckRequest)) {
+ phase = testPhase.kRequestSetDurationCheck;
+ processPhase();
+ }
+ break;
+
+ case testPhase.kRequestSetDurationCheck:
+ assert(!isResponseToAck);
+ const newDuration = setDurationCheckRequest.info;
+ assert(!Number.isNaN(newDuration) && newDuration > 0);
+
+ // Set the duration, then request verification.
+ util.mediaSource.duration = newDuration;
+ postMessage(setDurationCheckRequest);
+ phase = testPhase.kConfirmSetDurationResult;
+ break;
+
+ case testPhase.kConfirmSetDurationResult:
+ assert(isResponseToAck);
+ if (checkAckVerificationData(setDurationCheckRequest)) {
+ phase = testPhase.kRequestHaveNothingReadyStateRecheck;
+ processPhase();
+ }
+ break;
+
+ case testPhase.kRequestHaveNothingReadyStateRecheck:
+ assert(!isResponseToAck);
+ postMessage(haveNothingReadyStateCheckRequest);
+ phase = testPhase.kConfirmHaveNothingReadyStateRecheckResult;
+ break;
+
+ case testPhase.kConfirmHaveNothingReadyStateRecheckResult:
+ assert(isResponseToAck);
+ if (checkAckVerificationData(haveNothingReadyStateCheckRequest)) {
+ phase = testPhase.kRequestAwaitNewDurationCheck;
+ processPhase();
+ }
+ break;
+
+ case testPhase.kRequestAwaitNewDurationCheck:
+ assert(!isResponseToAck);
+ bufferMediaAndSendDurationVerificationRequest();
+ phase = testPhase.kConfirmAwaitNewDurationResult;
+ break;
+
+ case testPhase.kConfirmAwaitNewDurationResult:
+ assert(isResponseToAck);
+ if (checkAckVerificationData(getAwaitCurrentDurationRequest())) {
+ phase = testPhase.kRequestAtLeastHaveMetadataReadyStateCheck;
+ processPhase();
+ }
+ break;
+
+ case testPhase.kRequestAtLeastHaveMetadataReadyStateCheck:
+ assert(!isResponseToAck);
+ postMessage(atLeastHaveMetadataReadyStateCheckRequest);
+ phase = testPhase.kConfirmAtLeastHaveMetadataReadyStateResult;
+ break;
+
+ case testPhase.kConfirmAtLeastHaveMetadataReadyStateResult:
+ assert(isResponseToAck);
+ if (checkAckVerificationData(atLeastHaveMetadataReadyStateCheckRequest)) {
+ postMessage({ subject: messageSubject.WORKER_DONE });
+ }
+ phase = "No further phase processing should occur once WORKER_DONE message has been sent";
+ break;
+
+ default:
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: "Unexpected test phase in worker:" + phase,
+ });
+ }
+
+}
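
The worker above drives one half of a request/ACK protocol; the other half lives in its companion Window test page. As a rough, hedged sketch of what a main-thread peer must do to satisfy checkAckVerificationData() (the worker URL, the polling strategy, and the surrounding page setup are assumptions; messageSubject comes from mediasource-message-util.js):

    // Sketch of a main-thread peer for the worker's verification protocol.
    const worker = new Worker('mediasource-worker-duration.js');
    const video = document.createElement('video');
    document.body.appendChild(video);

    // Echo the original request back as the ACK payload; the worker's
    // checkAckVerificationData() compares it against the expected request
    // for the current phase.
    function ack(request) {
      worker.postMessage({ subject: messageSubject.ACK_VERIFIED, info: request });
    }

    worker.onmessage = e => {
      const request = e.data;
      switch (request.subject) {
        case messageSubject.HANDLE:
          video.srcObject = request.info;  // Attach the transferred MediaSourceHandle.
          break;
        case messageSubject.VERIFY_HAVE_NOTHING:
          if (video.readyState === HTMLMediaElement.HAVE_NOTHING) ack(request);
          break;
        case messageSubject.VERIFY_AT_LEAST_HAVE_METADATA:
          if (video.readyState >= HTMLMediaElement.HAVE_METADATA) ack(request);
          break;
        case messageSubject.VERIFY_DURATION:
          if (Number.isNaN(request.info) === Number.isNaN(video.duration)) ack(request);
          break;
        case messageSubject.AWAIT_DURATION: {
          // Poll until the element's duration matches the worker's value, then ack.
          const poll = setInterval(() => {
            if (video.duration === request.info) {
              clearInterval(poll);
              ack(request);
            }
          }, 10);
          break;
        }
      }
    };
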
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-get-objecturl.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-get-objecturl.js
new file mode 100644
index 0000000000..e9a5af6c81
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-get-objecturl.js
@@ -0,0 +1,13 @@
+importScripts("mediasource-worker-util.js");
+
+// Note, we do not use testharness.js utilities within the worker context
+// because it also communicates using postMessage to the main HTML document's
+// harness, and would confuse the test case message parsing there.
+
+onmessage = function(evt) {
+ postMessage({ subject: messageSubject.ERROR, info: "No message expected by Worker"});
+};
+
+let util = new MediaSourceWorkerUtil();
+
+postMessage({ subject: messageSubject.OBJECT_URL, info: URL.createObjectURL(util.mediaSource) });
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer-to-main.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer-to-main.js
new file mode 100644
index 0000000000..15cccb1a0e
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer-to-main.js
@@ -0,0 +1,10 @@
+importScripts('mediasource-message-util.js');
+
+// Note, we do not use testharness.js utilities within the worker context
+// because it also communicates using postMessage to the main HTML document's
+// harness, and would confuse the test case message parsing there.
+
+// Just obtain a MediaSourceHandle and transfer it to creator of our context.
+let handle = new MediaSource().handle;
+postMessage(
+ {subject: messageSubject.HANDLE, info: handle}, {transfer: [handle]});
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer.html b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer.html
new file mode 100644
index 0000000000..2db71c049d
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer.html
@@ -0,0 +1,316 @@
+<!DOCTYPE html>
+<html>
+<title>Test MediaSourceHandle transfer characteristics</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-message-util.js"></script>
+<body>
+<script>
+
+function assert_mseiw_supported() {
+ // Fail fast if MSE-in-Workers is not supported.
+ assert_true(
+ MediaSource.hasOwnProperty('canConstructInDedicatedWorker'),
+ 'MediaSource hasOwnProperty \'canConstructInDedicatedWorker\'');
+ assert_true(
+ MediaSource.canConstructInDedicatedWorker,
+ 'MediaSource.canConstructInDedicatedWorker');
+ assert_true(
+ window.hasOwnProperty('MediaSourceHandle'),
+ 'window must have MediaSourceHandle visibility');
+}
+
+function get_handle_from_new_worker(
+ t, script = 'mediasource-worker-handle-transfer-to-main.js') {
+ return new Promise((r) => {
+ let worker = new Worker(script);
+ worker.addEventListener('message', t.step_func(e => {
+ let subject = e.data.subject;
+ assert_true(subject != undefined, 'message must have a subject field');
+ switch (subject) {
+ case messageSubject.ERROR:
+ assert_unreached('Worker error: ' + e.data.info);
+ break;
+ case messageSubject.HANDLE:
+ const handle = e.data.info;
+ assert_not_equals(
+ handle, null, 'must have a non-null MediaSourceHandle');
+ r({worker, handle});
+ break;
+ default:
+ assert_unreached('Unexpected message subject: ' + subject);
+ }
+ }));
+ });
+}
+
+promise_test(async t => {
+ assert_mseiw_supported();
+ let {worker, handle} = await get_handle_from_new_worker(t);
+ assert_true(
+ handle instanceof MediaSourceHandle, 'must be a MediaSourceHandle');
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle);
+ }, 'serializing handle without transfer');
+}, 'MediaSourceHandle serialization without transfer must fail, tested in window context');
+
+promise_test(async t => {
+ assert_mseiw_supported();
+ let {worker, handle} = await get_handle_from_new_worker(t);
+ assert_true(
+ handle instanceof MediaSourceHandle, 'must be a MediaSourceHandle');
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle, handle]);
+ }, 'transferring same handle more than once in same postMessage');
+}, 'Same MediaSourceHandle transferred multiple times in single postMessage must fail, tested in window context');
+
+promise_test(async t => {
+ assert_mseiw_supported();
+ let {worker, handle} = await get_handle_from_new_worker(t);
+ assert_true(
+ handle instanceof MediaSourceHandle, 'must be a MediaSourceHandle');
+
+ // Transferring handle to worker without including it in the message is still
+ // a valid transfer, though the recipient will not be able to obtain the
+ // handle itself. Regardless, the handle in this sender's context will be
+ // detached.
+ worker.postMessage(null, [handle]);
+
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(null, [handle]);
+ }, 'transferring handle that was already detached should fail');
+
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring handle that was already detached should fail, even if this time it\'s included in the message');
+}, 'Attempt to transfer detached MediaSourceHandle must fail, tested in window context');
+
+promise_test(async t => {
+ assert_mseiw_supported();
+ let {worker, handle} = await get_handle_from_new_worker(t);
+ assert_true(
+ handle instanceof MediaSourceHandle, 'must be a MediaSourceHandle');
+
+ let video = document.createElement('video');
+ document.body.appendChild(video);
+ video.srcObject = handle;
+
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring handle that is currently srcObject fails');
+ assert_equals(video.srcObject, handle);
+
+ // Clear |handle| from being the srcObject value.
+ video.srcObject = null;
+
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring handle that was briefly srcObject before srcObject was reset to null should also fail');
+ assert_equals(video.srcObject, null);
+}, 'MediaSourceHandle cannot be transferred, immediately after set as srcObject, even if srcObject immediately reset to null');
+
+promise_test(async t => {
+ assert_mseiw_supported();
+ let {worker, handle} = await get_handle_from_new_worker(t);
+ assert_true(
+ handle instanceof MediaSourceHandle, 'must be a MediaSourceHandle');
+
+ let video = document.createElement('video');
+ document.body.appendChild(video);
+ video.srcObject = handle;
+ assert_not_equals(video.networkState, HTMLMediaElement.NETWORK_LOADING);
+ // Initial step of resource selection algorithm sets networkState to
+ // NETWORK_NO_SOURCE. networkState only becomes NETWORK_LOADING after stable
+ // state awaited and resource selection algorithm continues with, in this
+ // case, an assigned media provider object (which is the MediaSource
+ // underlying the handle).
+ assert_equals(video.networkState, HTMLMediaElement.NETWORK_NO_SOURCE);
+
+ // Wait until 'loadstart' media element event is dispatched.
+ await new Promise((r) => {
+ video.addEventListener(
+ 'loadstart', t.step_func(e => {
+ r();
+ }),
+ {once: true});
+ });
+ assert_equals(video.networkState, HTMLMediaElement.NETWORK_LOADING);
+
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring handle that is currently srcObject, after loadstart, fails');
+ assert_equals(video.srcObject, handle);
+
+ // Clear |handle| from being the srcObject value.
+ video.srcObject = null;
+
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring handle that was srcObject until \'loadstart\' when srcObject was reset to null should also fail');
+ assert_equals(video.srcObject, null);
+}, 'MediaSourceHandle cannot be transferred, if it was srcObject when asynchronous load starts (loadstart), even if srcObject is then immediately reset to null');
+
+promise_test(async t => {
+ assert_mseiw_supported();
+ let {worker, handle} = await get_handle_from_new_worker(t);
+ assert_true(
+ handle instanceof MediaSourceHandle, 'must be a MediaSourceHandle');
+
+ let video = document.createElement('video');
+ document.body.appendChild(video);
+
+ // Transfer the handle away so that our instance of it is detached.
+ worker.postMessage(null, [handle]);
+
+ // Now assign the handle to srcObject to attempt a load. The 'loadstart' event
+ // should fire, but a media element error should then occur because a detached
+ // MediaSourceHandle cannot attach to its underlying MediaSource.
+
+ video.srcObject = handle;
+ assert_equals(
+ video.networkState, HTMLMediaElement.NETWORK_NO_SOURCE,
+ 'before async load start, networkState should be NETWORK_NO_SOURCE');
+
+ // Before 'loadstart' dispatch, we don't expect the media element error.
+ video.onerror = t.unreached_func(
+ 'Error is unexpected before \'loadstart\' event dispatch');
+
+ // Wait until 'loadstart' media element event is dispatched.
+ await new Promise((r) => {
+ video.addEventListener(
+ 'loadstart', t.step_func(e => {
+ r();
+ }),
+ {once: true});
+ });
+
+ // Now wait until 'error' media element event is dispatched.
+ video.onerror = null;
+ await new Promise((r) => {
+ video.addEventListener(
+ 'error', t.step_func(e => {
+ r();
+ }),
+ {once: true});
+ });
+
+ // Confirm expected error and states resulting from the "dedicated media
+ // source failure steps":
+ // https://html.spec.whatwg.org/multipage/media.html#dedicated-media-source-failure-steps
+ let e = video.error;
+ assert_true(e instanceof MediaError);
+ assert_equals(e.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+ assert_equals(
+ video.readyState, HTMLMediaElement.HAVE_NOTHING,
+ 'load failure should occur long before parsing any appended metadata.');
+ assert_equals(video.networkState, HTMLMediaElement.NETWORK_NO_SOURCE);
+
+ // Even though the attempt to load the detached handle failed, the handle
+ // remains detached and has now also been used as srcObject. Re-verify that
+ // such a handle instance must still fail a transfer attempt.
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring detached handle that is currently srcObject, after loadstart and load failure, fails');
+ assert_equals(video.srcObject, handle);
+
+ // Clear |handle| from being the srcObject value.
+ video.srcObject = null;
+
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring detached handle that was srcObject until \'loadstart\' and load failure when srcObject was reset to null should also fail');
+ assert_equals(video.srcObject, null);
+}, 'A detached (already transferred away) MediaSourceHandle cannot successfully load when assigned to srcObject');
+
+promise_test(async t => {
+ assert_mseiw_supported();
+ // Get a handle from a worker that is prepared to buffer real media once its
+ // MediaSource instance attaches and 'sourceopen' is dispatched. Unlike
+ // earlier cases in this file, we need positive indication from precisely one
+ // of multiple media elements that the attachment and playback succeeded.
+ let {worker, handle} =
+ await get_handle_from_new_worker(t, 'mediasource-worker-play.js');
+ assert_true(
+ handle instanceof MediaSourceHandle, 'must be a MediaSourceHandle');
+
+ let videos = [];
+ const NUM_ELEMENTS = 5;
+ for (let i = 0; i < NUM_ELEMENTS; ++i) {
+ let v = document.createElement('video');
+ videos.push(v);
+ document.body.appendChild(v);
+ }
+
+ await new Promise((r) => {
+ let errors = 0;
+ let endeds = 0;
+
+ // Set up handlers to expect precisely 1 'ended' event and N-1 errors.
+ videos.forEach((v) => {
+ v.addEventListener(
+ 'error', t.step_func(e => {
+ // Confirm expected error and states resulting from the "dedicated
+ // media source failure steps":
+ // https://html.spec.whatwg.org/multipage/media.html#dedicated-media-source-failure-steps
+ let err = v.error;
+ assert_true(err instanceof MediaError);
+ assert_equals(err.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+ assert_equals(
+ v.readyState, HTMLMediaElement.HAVE_NOTHING,
+ 'load failure should occur long before parsing any appended metadata.');
+ assert_equals(v.networkState, HTMLMediaElement.NETWORK_NO_SOURCE);
+
+ errors++;
+ if (errors + endeds == videos.length && endeds == 1)
+ r();
+ }),
+ {once: true});
+ v.addEventListener(
+ 'ended', t.step_func(e => {
+ endeds++;
+ if (errors + endeds == videos.length && endeds == 1)
+ r();
+ }),
+ {once: true});
+ v.srcObject = handle;
+ assert_equals(
+ v.networkState, HTMLMediaElement.NETWORK_NO_SOURCE,
+ 'before async load start, networkState should be NETWORK_NO_SOURCE');
+ });
+
+ let playPromises = [];
+ videos.forEach((v) => {
+ playPromises.push(v.play());
+ });
+
+ // Ignore playPromise success/rejection, if any.
+ playPromises.forEach((p) => {
+ if (p !== undefined) {
+ p.then(_ => {}).catch(_ => {});
+ }
+ });
+ });
+
+ // Once the handle has been assigned as srcObject, it must fail transfer
+ // steps.
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring handle that is currently srcObject on multiple elements, fails');
+ videos.forEach((v) => {
+ assert_equals(v.srcObject, handle);
+ v.srcObject = null;
+ });
+
+ assert_throws_dom('DataCloneError', function() {
+ worker.postMessage(handle, [handle]);
+ }, 'transferring handle that was srcObject on multiple elements, then was unset on them, should also fail');
+ videos.forEach((v) => {
+ assert_equals(v.srcObject, null);
+ });
+}, 'Precisely one load of the same MediaSourceHandle assigned synchronously to multiple media element srcObjects succeeds');
+
+fetch_tests_from_worker(new Worker('mediasource-worker-handle-transfer.js'));
+
+</script>
+</body>
+</html>
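
Taken together, the assertions above encode one rule: a MediaSourceHandle behaves as move-only, and either a completed transfer or use as srcObject permanently prevents further transfers, surfacing as DataCloneError. A hedged sketch of how application code could wrap that rule (the helper name and message shape are assumptions):

    // Attempts to transfer a MediaSourceHandle to another context and reports
    // whether the transfer succeeded. After a successful call the handle is
    // detached in this context and cannot be transferred or attached again here.
    function tryTransferHandle(targetWorker, handle) {
      try {
        targetWorker.postMessage({ subject: 'handle', info: handle }, [handle]);
        return true;
      } catch (e) {
        if (e.name === 'DataCloneError')
          return false;  // Already detached, or already used as a media element srcObject.
        throw e;
      }
    }
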
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer.js
new file mode 100644
index 0000000000..803da44e23
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle-transfer.js
@@ -0,0 +1,19 @@
+importScripts('/resources/testharness.js');
+
+test(t => {
+ let handle = new MediaSource().handle;
+ assert_true(handle instanceof MediaSourceHandle);
+ assert_throws_dom('DataCloneError', function() {
+ postMessage(handle);
+ }, 'serializing handle without transfer');
+}, 'MediaSourceHandle serialization without transfer must fail, tested in worker');
+
+test(t => {
+ let handle = new MediaSource().handle;
+ assert_true(handle instanceof MediaSourceHandle);
+ assert_throws_dom('DataCloneError', function() {
+ postMessage(handle, [handle, handle]);
+ }, 'transferring same handle more than once in same postMessage');
+}, 'Same MediaSourceHandle transferred multiple times in single postMessage must fail, tested in worker');
+
+done();
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle.html b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle.html
new file mode 100644
index 0000000000..6129e05ffb
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle.html
@@ -0,0 +1,61 @@
+<!DOCTYPE html>
+<html>
+<title>Test MediaSource object and handle creation, with MediaSource in dedicated worker</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-message-util.js"></script>
+<script>
+
+async_test(t => {
+ // Fail fast if MSE-in-Workers is not supported.
+ assert_true(MediaSource.hasOwnProperty("canConstructInDedicatedWorker"), "MediaSource hasOwnProperty 'canConstructInDedicatedWorker'");
+ assert_true(MediaSource.canConstructInDedicatedWorker, "MediaSource.canConstructInDedicatedWorker");
+ assert_true(window.hasOwnProperty("MediaSourceHandle"), "window must have MediaSourceHandle visibility");
+
+ let worker = new Worker("mediasource-worker-play.js");
+ worker.onmessage = t.step_func(e => {
+ let subject = e.data.subject;
+ assert_true(subject != undefined, "message must have a subject field");
+ switch (subject) {
+ case messageSubject.ERROR:
+ assert_unreached("Worker error: " + e.data.info);
+ break;
+ case messageSubject.HANDLE:
+ const handle = e.data.info;
+ assert_not_equals(handle, null, "must have a non-null MediaSourceHandle");
+ assert_true(handle instanceof MediaSourceHandle, "must be a MediaSourceHandle");
+ t.done();
+ break;
+ default:
+ assert_unreached("Unexpected message subject: " + subject);
+
+ }
+ });
+}, "Test main context receipt of postMessage'd MediaSourceHandle from DedicatedWorker MediaSource");
+
+test(t => {
+ assert_true(window.hasOwnProperty("MediaSourceHandle"), "window must have MediaSourceHandle");
+}, "Test main-thread has MediaSourceHandle defined");
+
+test(t => {
+ // Note: the MSE spec may eventually describe how a main-thread MediaSource can
+ // attach to an HTMLMediaElement using a MediaSourceHandle. For now, ensure
+ // that the implementation does not expose this, per the current spec.
+ assert_false(
+ "handle" in MediaSource.prototype,
+ "window MediaSource must not have handle in prototype");
+}, "Test main-thread MediaSource does not have handle getter");
+
+if (MediaSource.hasOwnProperty("canConstructInDedicatedWorker") && MediaSource.canConstructInDedicatedWorker === true) {
+ // If implementation claims support for MSE-in-Workers, then fetch and run
+ // some tests directly in another dedicated worker and get their results
+ // merged into those from this page.
+ fetch_tests_from_worker(new Worker("mediasource-worker-handle.js"));
+} else {
+ // Otherwise, fetch and run a test that verifies lack of support of
+ // MediaSource construction in another dedicated worker.
+ fetch_tests_from_worker(new Worker("mediasource-worker-must-fail-if-unsupported.js"));
+}
+
+</script>
+</html>
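
The support gate used at the top of this page (and repeated in the other Window-context pages in this directory) can be collapsed into a single predicate; a sketch for a Window context:

    // True only when the implementation claims MSE-in-Workers support and also
    // exposes the MediaSourceHandle interface needed to attach a worker MediaSource.
    function mseInWorkersSupported() {
      return MediaSource.hasOwnProperty('canConstructInDedicatedWorker') &&
          MediaSource.canConstructInDedicatedWorker === true &&
          window.hasOwnProperty('MediaSourceHandle');
    }
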
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle.js
new file mode 100644
index 0000000000..d35cb877c2
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-handle.js
@@ -0,0 +1,70 @@
+importScripts("/resources/testharness.js");
+
+test(t => {
+ // The Window test html conditionally fetches and runs these tests only if the
+ // implementation exposes a true-valued static canConstructInDedicatedWorker
+ // attribute on MediaSource in the Window context. So, the implementation must
+ // agree on support here in the dedicated worker context.
+
+ // Ensure we're executing in a dedicated worker context.
+ assert_true(self instanceof DedicatedWorkerGlobalScope, "self instanceof DedicatedWorkerGlobalScope");
+ assert_true(MediaSource.hasOwnProperty("canConstructInDedicatedWorker", "DedicatedWorker MediaSource hasOwnProperty 'canConstructInDedicatedWorker'"));
+ assert_true(MediaSource.canConstructInDedicatedWorker, "DedicatedWorker MediaSource.canConstructInDedicatedWorker");
+}, "MediaSource in DedicatedWorker context must have true-valued canConstructInDedicatedWorker if Window context had it");
+
+test(t => {
+ assert_true(
+ 'handle' in MediaSource.prototype,
+ 'dedicated worker MediaSource must have handle in prototype');
+ assert_true(self.hasOwnProperty("MediaSourceHandle"), "dedicated worker must have MediaSourceHandle visibility");
+}, 'MediaSource prototype in DedicatedWorker context must have \'handle\', and worker must have MediaSourceHandle');
+
+test(t => {
+ const ms = new MediaSource();
+ assert_equals(ms.readyState, "closed");
+}, "MediaSource construction succeeds with initial closed readyState in DedicatedWorker");
+
+test(t => {
+ const ms = new MediaSource();
+ const handle = ms.handle;
+ assert_not_equals(handle, null, 'must have a non-null \'handle\' attribute');
+ assert_true(handle instanceof MediaSourceHandle, "must be a MediaSourceHandle");
+}, 'mediaSource.handle in DedicatedWorker returns a MediaSourceHandle');
+
+test(t => {
+ const msA = new MediaSource();
+ const msB = new MediaSource();
+ const handleA1 = msA.handle;
+ const handleA2 = msA.handle;
+ const handleA3 = msA['handle'];
+ const handleB1 = msB.handle;
+ const handleB2 = msB.handle;
+ assert_true(
+ handleA1 === handleA2 && handleB1 === handleB2 && handleA1 != handleB1,
+ 'SameObject is observed for mediaSource.handle, and different MediaSource instances have different handles');
+ assert_true(
+ handleA1 === handleA3,
+ 'SameObject is observed even when accessing handle differently');
+ assert_true(
+ handleA1 instanceof MediaSourceHandle &&
+ handleB1 instanceof MediaSourceHandle,
+ 'handle property returns MediaSourceHandles');
+}, 'mediaSource.handle observes SameObject property correctly');
+
+test(t => {
+ const ms1 = new MediaSource();
+ const handle1 = ms1.handle;
+ const ms2 = new MediaSource();
+ const handle2 = ms2.handle;
+ assert_true(
+ handle1 !== handle2,
+ 'distinct MediaSource instances must have distinct handles');
+
+ // Verify attempt to change value of the handle property does not succeed.
+ ms1.handle = handle2;
+ assert_true(
+ ms1.handle === handle1 && ms2.handle === handle2,
+ 'MediaSource handle is readonly, so should not have changed');
+}, 'Attempt to set MediaSource handle property should fail to change it, since it is readonly');
+
+done();
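
The readonly assertion in the last test relies on sloppy mode, where assigning to a getter-only accessor is silently ignored. Under strict mode the same assignment throws; a sketch of that variant, assuming the same dedicated worker context:

    'use strict';
    const ms = new MediaSource();
    const originalHandle = ms.handle;
    let threwTypeError = false;
    try {
      ms.handle = null;  // The accessor has no setter, so strict mode throws.
    } catch (e) {
      threwTypeError = e instanceof TypeError;
    }
    // Expected: threwTypeError === true and ms.handle === originalHandle.
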
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-must-fail-if-unsupported.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-must-fail-if-unsupported.js
new file mode 100644
index 0000000000..69c65f6aa2
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-must-fail-if-unsupported.js
@@ -0,0 +1,18 @@
+importScripts("/resources/testharness.js");
+
+test(t => {
+ // The Window test html conditionally fetches and runs these tests only if the
+ // implementation does not have a true-valued static
+ // canConstructInDedicatedWorker property on MediaSource in the Window
+ // context. So, the implementation must agree on lack of support here in the
+ // dedicated worker context.
+
+ // Ensure we're executing in a dedicated worker context.
+ assert_true(self instanceof DedicatedWorkerGlobalScope, "self instanceof DedicatedWorkerGlobalScope");
+ assert_true(self.MediaSource === undefined, "MediaSource is undefined in DedicatedWorker");
+ assert_throws_js(ReferenceError,
+ function() { var ms = new MediaSource(); },
+ "MediaSource construction in DedicatedWorker throws exception");
+}, "MediaSource construction in DedicatedWorker context must fail if Window context did not claim MSE supported in DedicatedWorker");
+
+done();
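
Outside of a test, a worker that wants to degrade gracefully instead of asserting could report the same condition back to its creator; a minimal sketch (the message shape is an assumption):

    // Report MSE availability to the page rather than throwing when MediaSource
    // is absent from this dedicated worker context.
    const hasMse = typeof MediaSource !== 'undefined' &&
        self instanceof DedicatedWorkerGlobalScope;
    postMessage({ subject: 'mse-availability', info: hasMse });
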
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-objecturl.html b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-objecturl.html
new file mode 100644
index 0000000000..ae60199672
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-objecturl.html
@@ -0,0 +1,52 @@
+<!DOCTYPE html>
+<html>
+<title>Test MediaSource object and objectUrl creation and load via that url should fail, with MediaSource in dedicated worker</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-message-util.js"></script>
+<script>
+
+async_test(t => {
+ // Fail fast if MSE-in-Workers is not supported.
+ assert_true(MediaSource.hasOwnProperty("canConstructInDedicatedWorker"), "MediaSource hasOwnProperty 'canConstructInDedicatedWorker'");
+ assert_true(MediaSource.canConstructInDedicatedWorker, "MediaSource.canConstructInDedicatedWorker");
+
+ let worker = new Worker("mediasource-worker-get-objecturl.js");
+ worker.onmessage = t.step_func(e => {
+ let subject = e.data.subject;
+ assert_true(subject != undefined, "message must have a subject field");
+ switch (subject) {
+ case messageSubject.ERROR:
+ assert_unreached("Worker error: " + e.data.info);
+ break;
+ case messageSubject.OBJECT_URL:
+ const url = e.data.info;
+ const video = document.createElement("video");
+ document.body.appendChild(video);
+ video.onerror = t.step_func((target) => {
+ assert_true(video.error != null);
+ assert_equals(video.error.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+ t.done();
+ });
+ video.onended = t.unreached_func("video should not have successfully loaded and played to end");
+ video.src = url;
+ break;
+ default:
+ assert_unreached("Unexpected message subject: " + subject);
+ }
+ });
+}, "Test main context load of a DedicatedWorker MediaSource object URL should fail");
+
+if (MediaSource.hasOwnProperty("canConstructInDedicatedWorker") && MediaSource.canConstructInDedicatedWorker === true) {
+ // If implementation claims support for MSE-in-Workers, then fetch and run
+ // some tests directly in another dedicated worker and get their results
+ // merged into those from this page.
+ fetch_tests_from_worker(new Worker("mediasource-worker-objecturl.js"));
+} else {
+ // Otherwise, fetch and run a test that verifies lack of support of
+ // MediaSource construction in another dedicated worker.
+ fetch_tests_from_worker(new Worker("mediasource-worker-must-fail-if-unsupported.js"));
+}
+
+</script>
+</html>
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-objecturl.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-objecturl.js
new file mode 100644
index 0000000000..2e70d99418
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-objecturl.js
@@ -0,0 +1,33 @@
+importScripts("/resources/testharness.js");
+
+test(t => {
+ // The Window test html conditionally fetches and runs these tests only if the
+ // implementation exposes a true-valued static canConstructInDedicatedWorker
+ // attribute on MediaSource in the Window context. So, the implementation must
+ // agree on support here in the dedicated worker context.
+
+ // Ensure we're executing in a dedicated worker context.
+ assert_true(self instanceof DedicatedWorkerGlobalScope, "self instanceof DedicatedWorkerGlobalScope");
+ assert_true(MediaSource.hasOwnProperty("canConstructInDedicatedWorker", "DedicatedWorker MediaSource hasOwnProperty 'canConstructInDedicatedWorker'"));
+ assert_true(MediaSource.canConstructInDedicatedWorker, "DedicatedWorker MediaSource.canConstructInDedicatedWorker");
+}, "MediaSource in DedicatedWorker context must have true-valued canConstructInDedicatedWorker if Window context had it");
+
+test(t => {
+ const ms = new MediaSource();
+ assert_equals(ms.readyState, "closed");
+}, "MediaSource construction succeeds with initial closed readyState in DedicatedWorker");
+
+test(t => {
+ const ms = new MediaSource();
+ const url = URL.createObjectURL(ms);
+}, "URL.createObjectURL(mediaSource) in DedicatedWorker does not throw exception");
+
+test(t => {
+ const ms = new MediaSource();
+ const url1 = URL.createObjectURL(ms);
+ const url2 = URL.createObjectURL(ms);
+ URL.revokeObjectURL(url1);
+ URL.revokeObjectURL(url2);
+}, "URL.revokeObjectURL(mediaSource) in DedicatedWorker with two url for same MediaSource");
+
+done();
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play-terminate-worker.html b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play-terminate-worker.html
new file mode 100644
index 0000000000..d6496afd6f
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play-terminate-worker.html
@@ -0,0 +1,85 @@
+<!DOCTYPE html>
+<html>
+<title>MediaSource-in-Worker looped playback test case with worker termination at various places</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-message-util.js"></script>
+<body>
+<script>
+
+function terminateWorkerAfterMultipleSetTimeouts(test, worker, timeouts_remaining) {
+ if (timeouts_remaining <= 0) {
+ worker.terminate();
+ test.step_timeout(() => { test.done(); }, 0);
+ } else {
+ test.step_timeout(() => {
+ terminateWorkerAfterMultipleSetTimeouts(test, worker, --timeouts_remaining);
+ }, 0);
+ }
+}
+
+function startWorkerAndTerminateWorker(test, when_to_start_timeouts, timeouts_to_await) {
+ // Fail fast if MSE-in-Workers is not supported.
+ assert_true(MediaSource.hasOwnProperty("canConstructInDedicatedWorker"), "MediaSource hasOwnProperty 'canConstructInDedicatedWorker'");
+ assert_true(MediaSource.canConstructInDedicatedWorker, "MediaSource.canConstructInDedicatedWorker");
+
+ const worker = new Worker("mediasource-worker-play-terminate-worker.js");
+ worker.onerror = test.unreached_func("worker error");
+
+ const video = document.createElement("video");
+ document.body.appendChild(video);
+ video.onerror = test.unreached_func("video element error");
+
+ if (when_to_start_timeouts == "after first ended event") {
+ video.addEventListener("ended", test.step_func(() => {
+ terminateWorkerAfterMultipleSetTimeouts(test, worker, timeouts_to_await);
+ video.currentTime = 0;
+ video.loop = true;
+ }), { once : true });
+ } else {
+ video.loop = true;
+ }
+
+ if (when_to_start_timeouts == "before setting srcObject") {
+ terminateWorkerAfterMultipleSetTimeouts(test, worker, timeouts_to_await);
+ }
+
+ worker.onmessage = test.step_func(e => {
+ let subject = e.data.subject;
+ assert_true(subject != undefined, "message must have a subject field");
+ switch (subject) {
+ case messageSubject.ERROR:
+ assert_unreached("Worker error: " + e.data.info);
+ break;
+ case messageSubject.HANDLE:
+ const handle = e.data.info;
+ video.srcObject = handle;
+ if (when_to_start_timeouts == "after setting srcObject") {
+ terminateWorkerAfterMultipleSetTimeouts(test, worker, timeouts_to_await);
+ }
+ video.play().catch(error => {
+ // Rejections are only expected here due to MEDIA_ERR_SRC_NOT_SUPPORTED, and
+ // only when we do not expect to reach at least one 'ended' event.
+ assert_not_equals(when_to_start_timeouts, "after first ended event");
+ assert_true(video.error != null);
+ assert_equals(video.error.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+ // Do not rethrow. Instead, wait for the step_timeouts to finish the test.
+ });
+ break;
+ default:
+ assert_unreached("Unexpected message subject: " + subject);
+ }
+ });
+}
+
+[ "before setting srcObject", "after setting srcObject", "after first ended event" ].forEach(when => {
+ for (let timeouts = 0; timeouts < 10; ++timeouts) {
+ async_test(test => { startWorkerAndTerminateWorker(test, when, timeouts); },
+ "Test worker MediaSource termination after at least " + timeouts +
+ " main thread setTimeouts, starting counting " + when);
+ }
+});
+
+</script>
+</body>
+</html>
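
terminateWorkerAfterMultipleSetTimeouts() chains zero-delay timeouts so that worker termination lands after a configurable number of main-thread macrotasks. An equivalent async formulation, shown only as a sketch using the same testharness.js step_timeout API, may make that intent easier to follow:

    // Yields `timeouts_to_await` main-thread macrotasks, then terminates the
    // worker and finishes the test on the following task.
    async function terminateWorkerAfterTasks(test, worker, timeouts_to_await) {
      for (let i = 0; i < timeouts_to_await; ++i)
        await new Promise(resolve => test.step_timeout(resolve, 0));
      worker.terminate();
      test.step_timeout(() => test.done(), 0);
    }
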
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play-terminate-worker.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play-terminate-worker.js
new file mode 100644
index 0000000000..b453818191
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play-terminate-worker.js
@@ -0,0 +1,15 @@
+// This worker script is intended to be used by the
+// mediasource-worker-play-terminate-worker.html test case. The script import
+// may itself be terminated by the main thread terminating our context,
+// producing a NetworkError, so we catch and ignore a NetworkError here. Note
+// that any dependency on globals defined in the imported scripts may result in
+// test harness error flakiness if an undefined variable (due to termination
+// causing importScripts to fail) is accessed. Hence this script just imports
+// and handles import errors, since such nondeterministic worker termination is
+// central to the test case.
+try {
+ importScripts("mediasource-worker-play.js");
+} catch(e) {
+ if (e.name != "NetworkError")
+ throw e;
+}
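
If more worker scripts ever need the same tolerance, the try/catch could be factored into a helper; a sketch with a hypothetical name:

    // Imports scripts but swallows the NetworkError produced when the owning
    // context is terminated mid-import; any other failure still propagates.
    function importScriptsToleratingTermination(...urls) {
      try {
        importScripts(...urls);
      } catch (e) {
        if (e.name != "NetworkError")
          throw e;
      }
    }
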
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play.html b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play.html
new file mode 100644
index 0000000000..455a224069
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play.html
@@ -0,0 +1,49 @@
+<!DOCTYPE html>
+<html>
+<title>Simple MediaSource-in-Worker playback test case</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-message-util.js"></script>
+<body>
+<script>
+
+async_test(t => {
+ // Fail fast if MSE-in-Workers is not supported.
+ assert_true(
+ MediaSource.hasOwnProperty('canConstructInDedicatedWorker'),
+ 'MediaSource hasOwnProperty \'canConstructInDedicatedWorker\'');
+ assert_true(
+ MediaSource.canConstructInDedicatedWorker,
+ 'MediaSource.canConstructInDedicatedWorker');
+
+ const video = document.createElement('video');
+ document.body.appendChild(video);
+ video.onerror = t.unreached_func('video element error');
+ video.onended = t.step_func_done();
+
+ let worker = new Worker('mediasource-worker-play.js');
+ worker.onerror = t.unreached_func('worker error');
+ worker.onmessage = t.step_func(e => {
+ let subject = e.data.subject;
+ assert_true(subject != undefined, 'message must have a subject field');
+ switch (subject) {
+ case messageSubject.ERROR:
+ assert_unreached('Worker error: ' + e.data.info);
+ break;
+ case messageSubject.HANDLE:
+ const handle = e.data.info;
+ video.srcObject = handle;
+ video.play();
+ break;
+ default:
+ assert_unreached('Unexpected message subject: ' + subject);
+ }
+ });
+}, 'Test worker MediaSource construction, attachment, buffering and basic playback');
+
+// See mediasource-worker-handle-transfer.html for a case that tests race of
+// multiple simultaneous attachments of same handle to multiple elements.
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play.js
new file mode 100644
index 0000000000..5c4760bf7b
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-play.js
@@ -0,0 +1,74 @@
+importScripts("mediasource-worker-util.js");
+
+// Note, we do not use testharness.js utilities within the worker context
+// because it also communicates using postMessage to the main HTML document's
+// harness, and would confuse the test case message parsing there.
+
+onmessage = function(evt) {
+ postMessage({ subject: messageSubject.ERROR, info: "No message expected by Worker"});
+};
+
+let util = new MediaSourceWorkerUtil();
+let handle = util.mediaSource.handle;
+
+util.mediaSource.addEventListener('sourceopen', () => {
+ // Immediately re-verify the SameObject property of the handle we transferred.
+ if (handle !== util.mediaSource.handle) {
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: 'mediaSource.handle changed from the original value'
+ });
+ }
+
+ // Also verify that transferring the already-transferred handle instance is
+ // prevented correctly.
+ try {
+ postMessage(
+ {
+ subject: messageSubject.ERROR,
+ info:
+ 'This postMessage should fail: the handle has already been transferred',
+ extra_info: util.mediaSource.handle
+ },
+ {transfer: [util.mediaSource.handle]});
+ } catch (e) {
+ if (e.name != 'DataCloneError') {
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: 'Expected handle retransfer exception did not occur'
+ });
+ }
+ }
+
+ let sourceBuffer = util.mediaSource.addSourceBuffer(util.mediaMetadata.type);
+ sourceBuffer.onerror = (err) => {
+ postMessage({ subject: messageSubject.ERROR, info: err });
+ };
+ sourceBuffer.onupdateend = () => {
+ // Reset the parser. Unnecessary for this buffering, but it helps with test
+ // coverage.
+ sourceBuffer.abort();
+ // Shorten the buffered media and test playback duration to avoid timeouts.
+ sourceBuffer.remove(0.5, Infinity);
+ sourceBuffer.onupdateend = () => {
+ util.mediaSource.duration = 0.5;
+ // Issue changeType to the same type that we've already buffered.
+ // Unnecessary for this buffering, but it helps with test coverage.
+ sourceBuffer.changeType(util.mediaMetadata.type);
+ util.mediaSource.endOfStream();
+ // Sanity check the duration.
+ // Unnecessary for this buffering, but it helps with test coverage.
+ var duration = util.mediaSource.duration;
+ if (isNaN(duration) || duration <= 0.0 || duration >= 1.0) {
+ postMessage({
+ subject: messageSubject.ERROR,
+ info: "mediaSource.duration " + duration + " is not within expected range (0,1)"
+ });
+ }
+ };
+ };
+ util.mediaLoadPromise.then(mediaData => { sourceBuffer.appendBuffer(mediaData); },
+ err => { postMessage({ subject: messageSubject.ERROR, info: err }) });
+}, {once: true});
+
+postMessage({ subject: messageSubject.HANDLE, info: handle }, { transfer: [handle] });
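
Stripped of the extra-coverage steps (abort(), remove(), changeType(), and the duration sanity check), the flow in this worker reduces to the following sketch, which assumes the whole test clip fits in a single appendBuffer() call:

    // Minimal MSE-in-Worker buffering flow: attach via a transferred handle,
    // append the media in one go, then signal end of stream.
    const util = new MediaSourceWorkerUtil();
    const handle = util.mediaSource.handle;

    util.mediaSource.addEventListener('sourceopen', () => {
      const sb = util.mediaSource.addSourceBuffer(util.mediaMetadata.type);
      sb.addEventListener('updateend', () => util.mediaSource.endOfStream(), { once: true });
      util.mediaLoadPromise.then(data => sb.appendBuffer(data));
    }, { once: true });

    postMessage({ subject: messageSubject.HANDLE, info: handle }, { transfer: [handle] });
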
diff --git a/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-util.js b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-util.js
new file mode 100644
index 0000000000..7adaf82508
--- /dev/null
+++ b/testing/web-platform/tests/media-source/dedicated-worker/mediasource-worker-util.js
@@ -0,0 +1,60 @@
+// This script is intended to be imported into a worker's script, and provides
+// common preparation for multiple test cases. Errors encountered are either
+// postMessaged with subject of messageSubject.ERROR, or in the case of failed
+// mediaLoadPromise, result in promise rejection.
+
+importScripts("mediasource-message-util.js");
+
+if (!this.MediaSource)
+ postMessage({ subject: messageSubject.ERROR, info: "MediaSource API missing from Worker" });
+
+let MEDIA_LIST = [
+ {
+ url: '../mp4/test.mp4',
+ type: 'video/mp4; codecs="mp4a.40.2,avc1.4d400d"',
+ },
+ {
+ url: '../webm/test.webm',
+ type: 'video/webm; codecs="vp8, vorbis"',
+ },
+];
+
+class MediaSourceWorkerUtil {
+ constructor() {
+ this.mediaSource = new MediaSource();
+
+ // Find supported test media, if any.
+ this.foundSupportedMedia = false;
+ for (let i = 0; i < MEDIA_LIST.length; ++i) {
+ this.mediaMetadata = MEDIA_LIST[i];
+ if (MediaSource.isTypeSupported(this.mediaMetadata.type)) {
+ this.foundSupportedMedia = true;
+ break;
+ }
+ }
+
+ // Begin asynchronous fetch of the test media.
+ if (this.foundSupportedMedia) {
+ this.mediaLoadPromise = MediaSourceWorkerUtil.loadBinaryAsync(this.mediaMetadata.url);
+ } else {
+ postMessage({ subject: messageSubject.ERROR, info: "No supported test media" });
+ }
+ }
+
+ static loadBinaryAsync(url) {
+ return new Promise((resolve, reject) => {
+ let request = new XMLHttpRequest();
+ request.open("GET", url, true);
+ request.responseType = "arraybuffer";
+ request.onerror = event => { reject(event); };
+ request.onload = () => {
+ if (request.status != 200) {
+ return reject("Unexpected loadData_ status code : " + request.status);
+ }
+ let response = new Uint8Array(request.response);
+ resolve(response);
+ };
+ request.send();
+ });
+ }
+}
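
loadBinaryAsync() uses XMLHttpRequest, which works in dedicated workers everywhere; a fetch()-based equivalent would look like the following sketch, which keeps the same resolution type and error-reporting shape:

    // fetch()-based equivalent of loadBinaryAsync(): rejects on network failure
    // or a non-200 status, and resolves with a Uint8Array of the response body.
    function loadBinaryWithFetch(url) {
      return fetch(url).then(response => {
        if (response.status != 200)
          throw "Unexpected loadData_ status code : " + response.status;
        return response.arrayBuffer();
      }).then(buffer => new Uint8Array(buffer));
    }
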
diff --git a/testing/web-platform/tests/media-source/generate-config-change-tests.py b/testing/web-platform/tests/media-source/generate-config-change-tests.py
new file mode 100755
index 0000000000..6ab2c8bf46
--- /dev/null
+++ b/testing/web-platform/tests/media-source/generate-config-change-tests.py
@@ -0,0 +1,225 @@
+#!/usr/bin/python
+# Copyright (C) 2013 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""
+This is a script that generates the content and HTML files for Media Source
+codec config change LayoutTests.
+"""
+import json
+import os
+
+DURATION = 2
+MEDIA_FORMATS = ['webm', 'mp4']
+ENCODE_SETTINGS = [
+ ## Video-only files
+ # Frame rate changes
+ {'fs': '320x240', 'fr': 24, 'kfr': 8, 'c': '#ff0000', 'vbr': 128, 'abr': 0, 'asr': 0, 'ach': 0, 'afreq': 0},
+ {'fs': '320x240', 'fr': 30, 'kfr': 10, 'c': '#ff0000', 'vbr': 128, 'abr': 0, 'asr': 0, 'ach': 0, 'afreq': 0},
+ # Frame size change
+ {'fs': '640x480', 'fr': 30, 'kfr': 10, 'c': '#00ff00', 'vbr': 128, 'abr': 0, 'asr': 0, 'ach': 0, 'afreq': 0},
+ # Bitrate change
+ {'fs': '320x240', 'fr': 30, 'kfr': 10, 'c': '#ff00ff', 'vbr': 256, 'abr': 0, 'asr': 0, 'ach': 0, 'afreq': 0},
+
+ ## Audio-only files
+ # Bitrate/Codebook changes
+ {'fs': '0x0', 'fr': 0, 'kfr': 0, 'c': '#000000', 'vbr': 0, 'abr': 128, 'asr': 44100, 'ach': 1, 'afreq': 2000},
+ {'fs': '0x0', 'fr': 0, 'kfr': 0, 'c': '#000000', 'vbr': 0, 'abr': 192, 'asr': 44100, 'ach': 1, 'afreq': 4000},
+
+ ## Audio-Video files
+ # Frame size change.
+ {'fs': '320x240', 'fr': 30, 'kfr': 10, 'c': '#ff0000', 'vbr': 256, 'abr': 128, 'asr': 44100, 'ach': 1, 'afreq': 2000},
+ {'fs': '640x480', 'fr': 30, 'kfr': 10, 'c': '#00ff00', 'vbr': 256, 'abr': 128, 'asr': 44100, 'ach': 1, 'afreq': 2000},
+ # Audio bitrate change.
+ {'fs': '640x480', 'fr': 30, 'kfr': 10, 'c': '#00ff00', 'vbr': 256, 'abr': 192, 'asr': 44100, 'ach': 1, 'afreq': 4000},
+ # Video bitrate change.
+ {'fs': '640x480', 'fr': 30, 'kfr': 10, 'c': '#00ffff', 'vbr': 512, 'abr': 128, 'asr': 44100, 'ach': 1, 'afreq': 2000},
+]
+
+CONFIG_CHANGE_TESTS = [
+ ["v-framerate", 0, 1, "Tests %s video-only frame rate changes."],
+ ["v-framesize", 1, 2, "Tests %s video-only frame size changes."],
+ ["v-bitrate", 1, 3, "Tests %s video-only bitrate changes."],
+ ["a-bitrate", 4, 5, "Tests %s audio-only bitrate changes."],
+ ["av-framesize", 6, 7, "Tests %s frame size changes in multiplexed content."],
+ ["av-audio-bitrate", 7, 8, "Tests %s audio bitrate changes in multiplexed content."],
+ ["av-video-bitrate", 7, 9, "Tests %s video bitrate changes in multiplexed content."]
+]
+
+CODEC_INFO = {
+ "mp4": {"audio": "mp4a.40.2", "video": "avc1.4D4001"},
+ "webm": {"audio": "vorbis", "video": "vp8"}
+}
+
+HTML_TEMPLATE = """<!DOCTYPE html>
+<html>
+ <head>
+ <script src="/w3c/resources/testharness.js"></script>
+ <script src="/w3c/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("%(media_format)s", "%(idA)s", "%(idB)s", "%(description)s");
+ </script>
+ </body>
+</html>
+"""
+
+def run(cmd_line):
+ os.system(" ".join(cmd_line))
+
+def generate_manifest(filename, media_filename, media_format, has_audio, has_video):
+ major_type = "audio"
+ if has_video:
+ major_type = "video"
+
+ codecs = []
+ if has_video:
+ codecs.append(CODEC_INFO[media_format]["video"])
+
+ if has_audio:
+ codecs.append(CODEC_INFO[media_format]["audio"])
+
+ mimetype = "%s/%s;codecs=\"%s\"" % (major_type, media_format, ",".join(codecs))
+
+ manifest = { 'url': media_filename, 'type': mimetype}
+
+ f = open(filename, "wb")
+ f.write(json.dumps(manifest, indent=4, separators=(',', ': ')))
+ f.close()
+
+def generate_test_html(media_format, config_change_tests, encoding_ids):
+ for test_info in config_change_tests:
+ filename = "../../media-source/mediasource-config-change-%s-%s.html" % (media_format, test_info[0])
+ html = HTML_TEMPLATE % {'media_format': media_format,
+ 'idA': encoding_ids[test_info[1]],
+ 'idB': encoding_ids[test_info[2]],
+ 'description': test_info[3] % (media_format)}
+ f = open(filename, "wb")
+ f.write(html)
+ f.close()
+
+
+def main():
+ encoding_ids = []
+
+ for media_format in MEDIA_FORMATS:
+ run(["mkdir ", media_format])
+
+ for settings in ENCODE_SETTINGS:
+ video_bitrate = settings['vbr']
+ has_video = (video_bitrate > 0)
+
+ audio_bitrate = settings['abr']
+ has_audio = (audio_bitrate > 0)
+ bitrate = video_bitrate + audio_bitrate
+
+ frame_size = settings['fs']
+ frame_rate = settings['fr']
+ keyframe_rate = settings['kfr']
+ color = settings['c']
+
+ sample_rate = settings['asr']
+ channels = settings['ach']
+ frequency = settings['afreq']
+
+ cmdline = ["ffmpeg", "-y"]
+
+ id_prefix = ""
+ id_params = ""
+ if has_audio:
+ id_prefix += "a"
+ id_params += "-%sHz-%sch" % (sample_rate, channels)
+
+ channel_layout = "FC"
+ sin_func = "sin(%s*2*PI*t)" % frequency
+ func = sin_func
+ if channels == 2:
+ channel_layout += "|BC"
+ func += "|" + sin_func
+
+ cmdline += ["-f", "lavfi", "-i", "aevalsrc=\"%s:s=%s:c=%s:d=%s\"" % (func, sample_rate, channel_layout, DURATION)]
+
+ if has_video:
+ id_prefix += "v"
+ id_params += "-%s-%sfps-%skfr" % (frame_size, frame_rate, keyframe_rate)
+
+ cmdline += ["-f", "lavfi", "-i", "color=%s:duration=%s:size=%s:rate=%s" % (color, DURATION, frame_size, frame_rate)]
+
+ if has_audio:
+ cmdline += ["-b:a", "%sk" % audio_bitrate]
+
+ if has_video:
+ cmdline += ["-b:v", "%sk" % video_bitrate]
+ cmdline += ["-keyint_min", "%s" % keyframe_rate]
+ cmdline += ["-g", "%s" % keyframe_rate]
+
+
+ textOverlayInfo = "'drawtext=fontfile=Mono:fontsize=32:text=Time\\\\:\\\\ %{pts}"
+ textOverlayInfo += ",drawtext=fontfile=Mono:fontsize=32:y=32:text=Size\\\\:\\\\ %s" % (frame_size)
+ textOverlayInfo += ",drawtext=fontfile=Mono:fontsize=32:y=64:text=Bitrate\\\\:\\\\ %s" % (bitrate)
+ textOverlayInfo += ",drawtext=fontfile=Mono:fontsize=32:y=96:text=FrameRate\\\\:\\\\ %s" % (frame_rate)
+ textOverlayInfo += ",drawtext=fontfile=Mono:fontsize=32:y=128:text=KeyFrameRate\\\\:\\\\ %s" % (keyframe_rate)
+
+ if has_audio:
+ textOverlayInfo += ",drawtext=fontfile=Mono:fontsize=32:y=160:text=SampleRate\\\\:\\\\ %s" % (sample_rate)
+ textOverlayInfo += ",drawtext=fontfile=Mono:fontsize=32:y=192:text=Channels\\\\:\\\\ %s" % (channels)
+
+ textOverlayInfo += "'"
+ cmdline += ["-vf", textOverlayInfo]
+
+ encoding_id = "%s-%sk%s" % (id_prefix, bitrate, id_params)
+
+ if len(encoding_ids) < len(ENCODE_SETTINGS):
+ encoding_ids.append(encoding_id)
+
+ filename_base = "%s/test-%s" % (media_format, encoding_id)
+ media_filename = filename_base + "." + media_format
+ manifest_filename = filename_base + "-manifest.json"
+
+ cmdline.append(media_filename)
+ run(cmdline)
+
+ # Remux file so it conforms to MSE bytestream requirements.
+ if media_format == "webm":
+ tmp_filename = media_filename + ".tmp"
+ run(["mse_webm_remuxer", media_filename, tmp_filename])
+ run(["mv", tmp_filename, media_filename])
+ elif media_format == "mp4":
+ run(["MP4Box", "-dash", "250", "-rap", media_filename])
+ run(["mv", filename_base + "_dash.mp4", media_filename])
+ run(["rm", filename_base + "_dash.mpd"])
+
+ generate_manifest(manifest_filename, media_filename, media_format, has_audio, has_video)
+ generate_test_html(media_format, CONFIG_CHANGE_TESTS, encoding_ids)
+
+if '__main__' == __name__:
+ main()
diff --git a/testing/web-platform/tests/media-source/idlharness.window.js b/testing/web-platform/tests/media-source/idlharness.window.js
new file mode 100644
index 0000000000..9300f67fe0
--- /dev/null
+++ b/testing/web-platform/tests/media-source/idlharness.window.js
@@ -0,0 +1,35 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+// META: timeout=long
+
+// https://w3c.github.io/media-source/
+
+'use strict';
+
+idl_test(
+ ['media-source'],
+ ['dom', 'html', 'url'],
+ async idl_array => {
+ idl_array.add_objects({
+ MediaSource: ['mediaSource'],
+ SourceBuffer: ['sourceBuffer'],
+ SourceBufferList: ['mediaSource.sourceBuffers'],
+ });
+
+ const video = document.createElement('video');
+ self.mediaSource = new MediaSource();
+ video.src = URL.createObjectURL(mediaSource);
+
+ self.sourceBuffer = await new Promise((resolve, reject) => {
+ mediaSource.addEventListener('sourceopen', () => {
+ var defaultType = 'video/webm;codecs="vp8,vorbis"';
+ if (MediaSource.isTypeSupported(defaultType)) {
+ resolve(mediaSource.addSourceBuffer(defaultType));
+ } else {
+ resolve(mediaSource.addSourceBuffer('video/mp4'));
+ }
+ });
+ step_timeout(() => reject(new Error('sourceopen event not fired')), 3000);
+ });
+ }
+);
diff --git a/testing/web-platform/tests/media-source/import_tests.sh b/testing/web-platform/tests/media-source/import_tests.sh
new file mode 100755
index 0000000000..a87619c024
--- /dev/null
+++ b/testing/web-platform/tests/media-source/import_tests.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+if [ $# -lt 1 ]
+then
+ echo "Usage: $0 <Blink directory>"
+ exit -1
+fi
+
+BLINK_ROOT=$1
+LAYOUT_TEST_DIR=$BLINK_ROOT/LayoutTests
+HTTP_MEDIA_TEST_DIR=$LAYOUT_TEST_DIR/http/tests/media
+
+if [ ! -d "$BLINK_ROOT" ]
+then
+ echo "$BLINK_ROOT is not a directory or doesn't exist"
+ exit -1
+fi
+
+if [ ! -d "$LAYOUT_TEST_DIR" ]
+then
+ echo "$LAYOUT_TEST_DIR is not a directory or doesn't exist"
+ exit -1
+fi
+
+#rm -rf *.html *.js webm mp4 manifest.txt
+
+cp $HTTP_MEDIA_TEST_DIR/media-source/mediasource-*.html $HTTP_MEDIA_TEST_DIR/media-source/mediasource-*.js .
+cp -r $HTTP_MEDIA_TEST_DIR/resources/media-source/webm .
+cp -r $HTTP_MEDIA_TEST_DIR/resources/media-source/mp4 .
+
+# Remove Blink-specific files
+rm mediasource-gc-after-decode-error-crash.html
+
+sed -i 's/\/w3c\/resources\//\/resources\//g' *.html
+sed -i 's/\/media\/resources\/media-source\///g' *.html
+sed -i 's/\/media\/resources\/media-source\///g' *.js
+sed -i 's/\/media\/resources\/media-source\///g' webm/*
+
+
+for TEST_FILE in `ls *.html`
+do
+ if [ "$TEST_FILE" = "index.html" ]
+ then
+ continue
+ fi
+ echo -e "$TEST_FILE" >> manifest.txt
+done
+
+cp import_tests-template.txt index.html
+
+chmod -R a+r *.html *.js webm mp4 manifest.txt
+chmod a+rx webm mp4
diff --git a/testing/web-platform/tests/media-source/manifest.txt b/testing/web-platform/tests/media-source/manifest.txt
new file mode 100644
index 0000000000..3ca784f17a
--- /dev/null
+++ b/testing/web-platform/tests/media-source/manifest.txt
@@ -0,0 +1,55 @@
+interfaces.html
+mediasource-activesourcebuffers.html
+mediasource-addsourcebuffer.html
+mediasource-addsourcebuffer-mode.html
+mediasource-append-buffer.html
+mediasource-appendbuffer-quota-exceeded.html
+mediasource-appendwindow.html
+mediasource-attach-stops-delaying-load-event.html
+mediasource-avtracks.html
+mediasource-buffered.html
+mediasource-closed.html
+mediasource-config-change-mp4-a-bitrate.html
+mediasource-config-change-mp4-av-audio-bitrate.html
+mediasource-config-change-mp4-av-framesize.html
+mediasource-config-change-mp4-av-video-bitrate.html
+mediasource-config-change-mp4-v-bitrate.html
+mediasource-config-change-mp4-v-framerate.html
+mediasource-config-change-mp4-v-framesize.html
+mediasource-config-change-webm-a-bitrate.html
+mediasource-config-change-webm-av-audio-bitrate.html
+mediasource-config-change-webm-av-framesize.html
+mediasource-config-change-webm-av-video-bitrate.html
+mediasource-config-change-webm-v-bitrate.html
+mediasource-config-change-webm-v-framerate.html
+mediasource-config-change-webm-v-framesize.html
+mediasource-detach.html
+mediasource-duration-boundaryconditions.html
+mediasource-duration.html
+mediasource-endofstream.html
+mediasource-endofstream-invaliderror.html
+mediasource-errors.html
+mediasource-is-type-supported.html
+mediasource-liveseekable.html
+mediasource-multiple-attach.html
+mediasource-play.html
+mediasource-play-then-seek-back.html
+mediasource-preload.html
+mediasource-redundant-seek.html
+mediasource-remove.html
+mediasource-removesourcebuffer.html
+mediasource-seekable.html
+mediasource-seek-beyond-duration.html
+mediasource-seek-during-pending-seek.html
+mediasource-sequencemode-append-buffer.html
+mediasource-sourcebufferlist.html
+mediasource-sourcebuffer-mode.html
+mediasource-sourcebuffer-mode-timestamps.html
+mediasource-timestamp-offset.html
+SourceBuffer-abort.html
+SourceBuffer-abort-readyState.html
+SourceBuffer-abort-removed.html
+SourceBuffer-abort-updating.html
+URL-createObjectURL.html
+URL-createObjectURL-null.html
+URL-createObjectURL-revoke.html
diff --git a/testing/web-platform/tests/media-source/mediasource-activesourcebuffers.html b/testing/web-platform/tests/media-source/mediasource-activesourcebuffers.html
new file mode 100644
index 0000000000..02ebecc773
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-activesourcebuffers.html
@@ -0,0 +1,238 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Checks MediaSource.activeSourceBuffers and changes to selected/enabled track state</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ // Audio / Video files supported by the user agent under test
+ var subType = MediaSourceUtil.getSubType(MediaSourceUtil.AUDIO_ONLY_TYPE);
+ var manifestFilenameAudio = subType + "/test-a-128k-44100Hz-1ch-manifest.json";
+ var manifestFilenameVideo = subType + "/test-v-128k-320x240-30fps-10kfr-manifest.json";
+ var manifestFilenameAV = subType + "/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json";
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameAudio, function (typeAudio, dataAudio)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(typeAudio);
+ assert_equals(mediaSource.sourceBuffers.length, 1,
+ "sourceBuffers list contains one SourceBuffer");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0,
+ "activeSourceBuffers is empty to start with");
+
+ test.expectEvent(mediaSource.activeSourceBuffers, "addsourcebuffer");
+ test.expectEvent(mediaElement, "loadedmetadata");
+ sourceBuffer.appendBuffer(dataAudio);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 1,
+ "activeSourceBuffers updated when media element is loaded");
+ assert_equals(mediaSource.activeSourceBuffers[0], sourceBuffer,
+ "activeSourceBuffers contains sourceBuffer when media element is loaded");
+ test.done();
+ });
+ });
+ }, "SourceBuffer added to activeSourceBuffers list when its only audio track gets loaded (and thus becomes enabled).");
+
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameVideo, function (typeVideo, dataVideo)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(typeVideo);
+ assert_equals(mediaSource.sourceBuffers.length, 1,
+ "sourceBuffers list contains one SourceBuffer");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0,
+ "activeSourceBuffers is empty to start with");
+
+ test.expectEvent(mediaSource.activeSourceBuffers, "addsourcebuffer");
+ test.expectEvent(mediaElement, "loadedmetadata");
+ sourceBuffer.appendBuffer(dataVideo);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 1,
+ "activeSourceBuffers updated when media element is loaded");
+ assert_equals(mediaSource.activeSourceBuffers[0], sourceBuffer,
+ "activeSourceBuffers contains sourceBuffer when media element is loaded");
+ test.done();
+ });
+ });
+ }, "SourceBuffer added to activeSourceBuffers list when its only video track gets loaded (and thus becomes selected).");
+
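+      // Exercises all four combinations of SourceBuffer creation order and append order
+      // to verify that activeSourceBuffers always mirrors the ordering of sourceBuffers.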
+ function mediaSourceActiveSourceBufferOrderTest(addAudioFirst, appendAudioFirst)
+ {
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameAudio, function (typeAudio, dataAudio)
+ {
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameVideo, function (typeVideo, dataVideo)
+ {
+ var sourceBufferAudio, sourceBufferVideo, expectedFirstSB, expectedSecondSB;
+ if (addAudioFirst) {
+ expectedFirstSB = sourceBufferAudio = mediaSource.addSourceBuffer(typeAudio);
+ expectedSecondSB = sourceBufferVideo = mediaSource.addSourceBuffer(typeVideo);
+ } else {
+ expectedFirstSB = sourceBufferVideo = mediaSource.addSourceBuffer(typeVideo);
+ expectedSecondSB = sourceBufferAudio = mediaSource.addSourceBuffer(typeAudio);
+ }
+
+ assert_equals(mediaSource.activeSourceBuffers.length, 0,
+ "activeSourceBuffers is empty to start with");
+ assert_equals(mediaSource.sourceBuffers.length, 2,
+ "sourceBuffers list contains both SourceBuffers");
+ assert_equals(mediaSource.sourceBuffers[0], expectedFirstSB,
+ "first SourceBuffer matches expectation");
+ assert_equals(mediaSource.sourceBuffers[1], expectedSecondSB,
+ "second SourceBuffer matches expectation");
+ test.expectEvent(mediaSource.activeSourceBuffers, "addsourcebuffer");
+ test.expectEvent(mediaSource.activeSourceBuffers, "addsourcebuffer");
+ test.expectEvent(mediaElement, "loadedmetadata");
+ if (appendAudioFirst) {
+ sourceBufferAudio.appendBuffer(dataAudio);
+ sourceBufferVideo.appendBuffer(dataVideo);
+ } else {
+ sourceBufferVideo.appendBuffer(dataVideo);
+ sourceBufferAudio.appendBuffer(dataAudio);
+ }
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 2,
+ "activeSourceBuffers list updated when tracks are loaded");
+ assert_equals(mediaSource.activeSourceBuffers[0], mediaSource.sourceBuffers[0],
+ "first active SourceBuffer matches first SourceBuffer");
+ assert_equals(mediaSource.activeSourceBuffers[1], mediaSource.sourceBuffers[1],
+ "second active SourceBuffer matches second SourceBuffer");
+ test.done();
+ });
+ });
+ });
+ },
+ "Active SourceBuffers must appear in the same order as they appear in the sourceBuffers attribute: " +
+ (addAudioFirst ? "audio is first sourceBuffer" : "video is first sourceBuffer") + ", " +
+ (appendAudioFirst ? "audio media appended first" : "video media appended first"));
+ }
+
+ mediaSourceActiveSourceBufferOrderTest(true, true);
+ mediaSourceActiveSourceBufferOrderTest(true, false);
+ mediaSourceActiveSourceBufferOrderTest(false, true);
+ mediaSourceActiveSourceBufferOrderTest(false, false);
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameAudio, function (typeAudio, dataAudio)
+ {
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameVideo, function (typeVideo, dataVideo)
+ {
+ var sourceBufferAudio = mediaSource.addSourceBuffer(typeAudio);
+ var sourceBufferVideo = mediaSource.addSourceBuffer(typeVideo);
+
+ test.expectEvent(sourceBufferAudio.audioTracks, "addtrack");
+ test.expectEvent(sourceBufferVideo.videoTracks, "addtrack");
+ sourceBufferAudio.appendBuffer(dataAudio);
+ sourceBufferVideo.appendBuffer(dataVideo);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 2,
+ "activeSourceBuffers list updated when tracks are loaded");
+ assert_equals(sourceBufferAudio.audioTracks.length, 1,
+ "audio track list contains loaded audio track");
+ assert_equals(sourceBufferVideo.videoTracks.length, 1,
+ "video track list contains loaded video track");
+
+ test.expectEvent(mediaSource.activeSourceBuffers, "removesourcebuffer");
+ sourceBufferAudio.audioTracks[0].enabled = false;
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 1,
+ "audio source buffer no longer in the activeSourceBuffers list");
+ assert_equals(mediaSource.activeSourceBuffers[0], sourceBufferVideo,
+ "activeSourceBuffers list only contains the video SourceBuffer");
+
+ test.expectEvent(mediaSource.activeSourceBuffers, "addsourcebuffer");
+ test.expectEvent(mediaSource.activeSourceBuffers, "removesourcebuffer");
+ sourceBufferAudio.audioTracks[0].enabled = true;
+ sourceBufferVideo.videoTracks[0].selected = false;
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 1,
+ "video source buffer no longer in the activeSourceBuffers list");
+ assert_equals(mediaSource.activeSourceBuffers[0], sourceBufferAudio,
+ "activeSourceBuffers list only contains the audio SourceBuffer");
+ test.done();
+ });
+ });
+ });
+ }, "Active SourceBuffers list reflects changes to selected audio/video tracks associated with separate SourceBuffers.");
+
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameAV, function (typeAV, dataAV)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(typeAV);
+
+ test.expectEvent(sourceBuffer.audioTracks, "addtrack");
+ test.expectEvent(sourceBuffer.videoTracks, "addtrack");
+ sourceBuffer.appendBuffer(dataAV);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 1,
+ "activeSourceBuffers list updated when tracks are loaded");
+ assert_equals(sourceBuffer.audioTracks.length, 1,
+ "audio track list contains loaded audio track");
+ assert_equals(sourceBuffer.videoTracks.length, 1,
+ "video track list contains loaded video track");
+
+ mediaSource.activeSourceBuffers.addEventListener("removesourcebuffer", test.unreached_func(
+ "Unexpected removal from activeSourceBuffers list"));
+ mediaSource.activeSourceBuffers.addEventListener("addsourcebuffer", test.unreached_func(
+ "Unexpected insertion in activeSourceBuffers list"));
+
+                  // Changes that don't affect SourceBuffer activation should only trigger
+                  // 'change' events on the corresponding AudioTrackList/VideoTrackList.
+ test.expectEvent(sourceBuffer.audioTracks, "change");
+ sourceBuffer.audioTracks[0].enabled = false;
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 1,
+ "activeSourceBuffers list unchanged");
+
+ test.expectEvent(sourceBuffer.videoTracks, "change");
+ sourceBuffer.audioTracks[0].enabled = true;
+ sourceBuffer.videoTracks[0].selected = false;
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.activeSourceBuffers.length, 1,
+ "activeSourceBuffers list unchanged");
+ test.done();
+ });
+ });
+ }, "Active SourceBuffers list ignores changes to selected audio/video tracks " +
+ "that do not affect the activation of the SourceBuffer.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-addsourcebuffer-mode.html b/testing/web-platform/tests/media-source/mediasource-addsourcebuffer-mode.html
new file mode 100644
index 0000000000..cf7f57f8e2
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-addsourcebuffer-mode.html
@@ -0,0 +1,31 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<title>Checks MediaSource.addSourceBuffer() sets SourceBuffer.mode appropriately</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ // Note all mime types in mediasource-util.js
+ // set the "generate timestamps flag" to false
+ var mime = MediaSourceUtil.VIDEO_ONLY_TYPE;
+ var sourceBuffer = mediaSource.addSourceBuffer(mime);
+ assert_equals(sourceBuffer.mode, "segments");
+ test.done();
+ }, "addSourceBuffer() sets SourceBuffer.mode to 'segments' when the generate timestamps flag is false");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
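+    // Per the MSE byte stream format registry, audio/aac and audio/mpeg set the
+    // "generate timestamps flag" to true, so mode must default to "sequence".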
+ var mime = 'audio/aac';
+ if (!MediaSource.isTypeSupported(mime)) {
+ mime = 'audio/mpeg';
+ if (!MediaSource.isTypeSupported(mime)) {
+ assert_unreached("Browser does not support the audio/aac and audio/mpeg MIME types used in this test");
+ }
+ }
+    var sourceBuffer = mediaSource.addSourceBuffer(mime);
+ assert_equals(sourceBuffer.mode, "sequence");
+ test.done();
+ }, "addSourceBuffer() sets SourceBuffer.mode to 'sequence' when the generate timestamps flag is true");
+</script>
\ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mediasource-addsourcebuffer.html b/testing/web-platform/tests/media-source/mediasource-addsourcebuffer.html
new file mode 100644
index 0000000000..a95155aefc
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-addsourcebuffer.html
@@ -0,0 +1,133 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MediaSource.addSourceBuffer() test cases</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaSource.endOfStream();
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE); },
+ "addSourceBuffer() threw an exception when in 'ended' state.");
+ test.done();
+ }, "Test addSourceBuffer() in 'ended' state.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ assert_throws_js(TypeError,
+ function() { mediaSource.addSourceBuffer(""); },
+ "addSourceBuffer() threw an exception when passed an empty string.");
+ test.done();
+ }, "Test addSourceBuffer() with empty type");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ assert_throws_dom("NotSupportedError",
+ function() { mediaSource.addSourceBuffer(null); },
+ "addSourceBuffer() threw an exception when passed null.");
+ test.done();
+ }, "Test addSourceBuffer() with null");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ assert_throws_dom("NotSupportedError",
+ function() { mediaSource.addSourceBuffer("invalidType"); },
+ "addSourceBuffer() threw an exception for an unsupported type.");
+ test.done();
+ }, "Test addSourceBuffer() with unsupported type");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var mimetype = 'video/webm;codecs="vp8,vorbis"';
+
+ assert_true(MediaSource.isTypeSupported(mimetype), mimetype + " is supported");
+
+ var sourceBuffer = mediaSource.addSourceBuffer(mimetype);
+ assert_true(sourceBuffer != null, "New SourceBuffer returned");
+ assert_equals(mediaSource.sourceBuffers[0], sourceBuffer, "SourceBuffer is in mediaSource.sourceBuffers");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "SourceBuffer is not in mediaSource.activeSourceBuffers");
+ test.done();
+ }, "Test addSourceBuffer() with Vorbis and VP8");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var videoMimetype = 'video/webm;codecs="vp8"';
+ var audioMimetype = 'audio/webm;codecs="vorbis"';
+
+ assert_true(MediaSource.isTypeSupported(videoMimetype), videoMimetype + " is supported");
+ assert_true(MediaSource.isTypeSupported(audioMimetype), audioMimetype + " is supported");
+
+ var sourceBufferA = mediaSource.addSourceBuffer(videoMimetype);
+ var sourceBufferB = mediaSource.addSourceBuffer(audioMimetype);
+ assert_equals(mediaSource.sourceBuffers[0], sourceBufferA, "sourceBufferA is in mediaSource.sourceBuffers");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "SourceBufferA is not in mediaSource.activeSourceBuffers");
+ assert_equals(mediaSource.sourceBuffers[1], sourceBufferB, "sourceBufferB is in mediaSource.sourceBuffers");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "SourceBufferB is not in mediaSource.activeSourceBuffers");
+ test.done();
+ }, "Test addSourceBuffer() with Vorbis and VP8 in separate SourceBuffers");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var mimetype = MediaSourceUtil.VIDEO_ONLY_TYPE;
+
+ assert_true(MediaSource.isTypeSupported(mimetype), mimetype + " is supported");
+
+ var sourceBuffer = mediaSource.addSourceBuffer(mimetype);
+ assert_true(sourceBuffer != null, "New SourceBuffer returned");
+ assert_equals(mediaSource.sourceBuffers[0], sourceBuffer, "SourceBuffer is in mediaSource.sourceBuffers");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "SourceBuffer is not in mediaSource.activeSourceBuffers");
+ test.done();
+ }, "Test addSourceBuffer() video only");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var mimetype = MediaSourceUtil.AUDIO_ONLY_TYPE;
+
+ assert_true(MediaSource.isTypeSupported(mimetype), mimetype + " is supported");
+
+ var sourceBuffer = mediaSource.addSourceBuffer(mimetype);
+ assert_true(sourceBuffer != null, "New SourceBuffer returned");
+ assert_equals(mediaSource.sourceBuffers[0], sourceBuffer, "SourceBuffer is in mediaSource.sourceBuffers");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "SourceBuffer is not in mediaSource.activeSourceBuffers");
+ test.done();
+ }, "Test addSourceBuffer() audio only");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var mimetype = 'video/mp4;codecs="avc1.4D4001,mp4a.40.2"';
+
+ assert_true(MediaSource.isTypeSupported(mimetype), mimetype + " is supported");
+
+ var sourceBuffer = mediaSource.addSourceBuffer(mimetype);
+ assert_true(sourceBuffer != null, "New SourceBuffer returned");
+ assert_equals(mediaSource.sourceBuffers[0], sourceBuffer, "SourceBuffer is in mediaSource.sourceBuffers");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "SourceBuffer is not in mediaSource.activeSourceBuffers");
+ test.done();
+ }, "Test addSourceBuffer() with AAC and H.264");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var videoMimetype = 'video/mp4;codecs="avc1.4D4001"';
+ var audioMimetype = 'audio/mp4;codecs="mp4a.40.2"';
+
+ assert_true(MediaSource.isTypeSupported(videoMimetype), videoMimetype + " is supported");
+ assert_true(MediaSource.isTypeSupported(audioMimetype), audioMimetype + " is supported");
+
+ var sourceBufferA = mediaSource.addSourceBuffer(videoMimetype);
+ var sourceBufferB = mediaSource.addSourceBuffer(audioMimetype);
+ assert_equals(mediaSource.sourceBuffers[0], sourceBufferA, "sourceBufferA is in mediaSource.sourceBuffers");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "SourceBufferA is not in mediaSource.activeSourceBuffers");
+ assert_equals(mediaSource.sourceBuffers[1], sourceBufferB, "sourceBufferB is in mediaSource.sourceBuffers");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "SourceBufferB is not in mediaSource.activeSourceBuffers");
+ test.done();
+ }, "Test addSourceBuffer() with AAC and H.264 in separate SourceBuffers");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-append-buffer.html b/testing/web-platform/tests/media-source/mediasource-append-buffer.html
new file mode 100644
index 0000000000..750ccaf456
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-append-buffer.html
@@ -0,0 +1,623 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>SourceBuffer.appendBuffer() test cases</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test SourceBuffer.appendBuffer() event dispatching.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.appendBuffer(mediaData); },
+                  "appendBuffer() throws an exception when there is a pending append.");
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test SourceBuffer.appendBuffer() call during a pending appendBuffer().");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "abort", "Append aborted.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ sourceBuffer.abort();
+
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test SourceBuffer.abort() call during a pending appendBuffer().");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+
+ test.expectEvent(mediaSource, "sourceended", "MediaSource sourceended event");
+ mediaSource.endOfStream();
+ assert_equals(mediaSource.readyState, "ended", "MediaSource readyState is 'ended'");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, "ended", "MediaSource readyState is 'ended'");
+
+ test.expectEvent(mediaSource, "sourceopen", "MediaSource sourceopen event");
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_equals(mediaSource.readyState, "open", "MediaSource readyState is 'open'");
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, "open", "MediaSource readyState is 'open'");
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test SourceBuffer.appendBuffer() triggering an 'ended' to 'open' transition.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+
+ test.expectEvent(mediaSource, "sourceended", "MediaSource sourceended event");
+ mediaSource.endOfStream();
+ assert_equals(mediaSource.readyState, "ended", "MediaSource readyState is 'ended'");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, "ended", "MediaSource readyState is 'ended'");
+
+ test.expectEvent(mediaSource, "sourceopen", "MediaSource sourceopen event");
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(new Uint8Array(0));
+
+ assert_equals(mediaSource.readyState, "open", "MediaSource readyState is 'open'");
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, "open", "MediaSource readyState is 'open'");
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test zero byte SourceBuffer.appendBuffer() call triggering an 'ended' to 'open' transition.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "abort", "Append aborted.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "activeSourceBuffers.length");
+
+ test.expectEvent(mediaSource.sourceBuffers, "removesourcebuffer", "sourceBuffers");
+ mediaSource.removeSourceBuffer(sourceBuffer);
+
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.appendBuffer(mediaData); },
+ "appendBuffer() throws an exception because it isn't attached to the mediaSource anymore.");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test MediaSource.removeSourceBuffer() call during a pending appendBuffer().");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.duration = 1.0; },
+ "set duration throws an exception when updating attribute is true.");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test set MediaSource.duration during a pending appendBuffer() for one of its SourceBuffers.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ mediaSource.addEventListener("sourceended", test.unreached_func("Unexpected event 'sourceended'"));
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.endOfStream(); },
+ "endOfStream() throws an exception when updating attribute is true.");
+
+ assert_equals(mediaSource.readyState, "open");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ assert_equals(mediaSource.readyState, "open");
+ test.done();
+ });
+ }, "Test MediaSource.endOfStream() during a pending appendBuffer() for one of its SourceBuffers.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.timestampOffset = 10.0; },
+ "set timestampOffset throws an exception when updating attribute is true.");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test set SourceBuffer.timestampOffset during a pending appendBuffer().");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE);
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(new Uint8Array(0));
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test appending an empty ArrayBufferView.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+
+ var arrayBufferView = new Uint8Array(mediaData);
+
+ assert_equals(arrayBufferView.length, mediaData.length, "arrayBufferView.length before transfer.");
+
+              // Send the buffer in a message so it gets neutered (detached).
+ window.postMessage( "test", "*", [arrayBufferView.buffer]);
+
+ assert_equals(arrayBufferView.length, 0, "arrayBufferView.length after transfer.");
+
+ sourceBuffer.appendBuffer(arrayBufferView);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test appending a neutered ArrayBufferView.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE);
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendBuffer(new ArrayBuffer(0));
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test appending an empty ArrayBuffer.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+
+ var arrayBuffer = mediaData.buffer.slice(0);
+
+ assert_equals(arrayBuffer.byteLength, mediaData.buffer.byteLength, "arrayBuffer.byteLength before transfer.");
+
+              // Send the buffer in a message so it gets neutered (detached).
+ window.postMessage( "test", "*", [arrayBuffer]);
+
+ assert_equals(arrayBuffer.byteLength, 0, "arrayBuffer.byteLength after transfer.");
+
+ sourceBuffer.appendBuffer(arrayBuffer);
+
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test appending a neutered ArrayBuffer.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
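+              // Split the init segment in two: readyState/duration must only advance once
+              // the second half completes the initialization segment.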
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var halfIndex = (initSegment.length + 1) / 2;
+ var partialInitSegment = initSegment.subarray(0, halfIndex);
+ var remainingInitSegment = initSegment.subarray(halfIndex);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+
+ test.expectEvent(sourceBuffer, "updateend", "partialInitSegment append ended.");
+ sourceBuffer.appendBuffer(partialInitSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_NOTHING);
+ assert_equals(mediaSource.duration, Number.NaN);
+ test.expectEvent(sourceBuffer, "updateend", "remainingInitSegment append ended.");
+ test.expectEvent(mediaElement, "loadedmetadata", "loadedmetadata event received.");
+ sourceBuffer.appendBuffer(remainingInitSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_METADATA);
+ assert_equals(mediaSource.duration, segmentInfo.duration);
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "loadeddata", "loadeddata fired.");
+ sourceBuffer.appendBuffer(mediaSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than_equal(mediaElement.readyState, mediaElement.HAVE_CURRENT_DATA);
+ assert_equals(sourceBuffer.updating, false);
+ assert_equals(mediaSource.readyState, "open");
+ test.done();
+ });
+ }, "Test appendBuffer with partial init segments.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+ var halfIndex = (mediaSegment.length + 1) / 2;
+ var partialMediaSegment = mediaSegment.subarray(0, halfIndex);
+ var remainingMediaSegment = mediaSegment.subarray(halfIndex);
+
+ test.expectEvent(sourceBuffer, "updateend", "InitSegment append ended.");
+ test.expectEvent(mediaElement, "loadedmetadata", "loadedmetadata done.");
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_METADATA);
+ assert_equals(mediaSource.duration, segmentInfo.duration);
+ test.expectEvent(sourceBuffer, "updateend", "partial media segment append ended.");
+ sourceBuffer.appendBuffer(partialMediaSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "loadeddata", "loadeddata fired.");
+ sourceBuffer.appendBuffer(remainingMediaSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than_equal(mediaElement.readyState, mediaElement.HAVE_CURRENT_DATA);
+ assert_equals(mediaSource.readyState, "open");
+ assert_equals(sourceBuffer.updating, false);
+ test.done();
+ });
+ }, "Test appendBuffer with partial media segments.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_NOTHING);
+ assert_equals(mediaSource.duration, Number.NaN);
+
+ // readyState is changing as per the Initialization Segment Received algorithm.
+ var loadedmetadataCalled = false;
+ mediaElement.addEventListener("loadedmetadata", function metadata(e) {
+ loadedmetadataCalled = true;
+ e.target.removeEventListener(e.type, metadata);
+ });
+ sourceBuffer.addEventListener("updateend", test.step_func(function updateend(e) {
+ assert_true(loadedmetadataCalled);
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_METADATA);
+ e.target.removeEventListener(e.type, updateend);
+ }));
+              test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ test.expectEvent(mediaElement, "loadedmetadata", "loadedmetadata event received.");
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_METADATA);
+ assert_equals(mediaSource.duration, segmentInfo.duration);
+ // readyState is changing as per the Coded Frame Processing algorithm.
+ var loadeddataCalled = false;
+ mediaElement.addEventListener("loadeddata", function loadeddata(e) {
+ loadeddataCalled = true;
+ e.target.removeEventListener(e.type, loadeddata);
+ });
+ sourceBuffer.addEventListener("updateend", test.step_func(function updateend(e) {
+ assert_true(loadeddataCalled);
+ assert_greater_than_equal(mediaElement.readyState, mediaElement.HAVE_CURRENT_DATA);
+ e.target.removeEventListener(e.type, updateend);
+ }));
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "loadeddata", "loadeddata fired.");
+ sourceBuffer.appendBuffer(mediaSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than_equal(mediaElement.readyState, mediaElement.HAVE_CURRENT_DATA);
+ assert_equals(sourceBuffer.updating, false);
+ assert_equals(mediaSource.readyState, "open");
+ test.done();
+ });
+ }, "Test appendBuffer events order.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var partialInitSegment = initSegment.subarray(0, initSegment.length / 2);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+
+ test.expectEvent(sourceBuffer, "updateend", "partialInitSegment append ended.");
+ sourceBuffer.appendBuffer(partialInitSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ // Call abort to reset the parser.
+ sourceBuffer.abort();
+
+                  // Append the full initialization segment.
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ sourceBuffer.appendBuffer(initSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "loadeddata", "loadeddata fired.");
+ sourceBuffer.appendBuffer(mediaSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ test.done();
+ });
+ }, "Test abort in the middle of an initialization segment.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(mediaSource.sourceBuffers, "removesourcebuffer", "SourceBuffer removed.");
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ test.waitForExpectedEvents(function()
+ {
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.abort(); },
+ "sourceBuffer.abort() throws an exception for InvalidStateError.");
+
+ test.done();
+ });
+ }, "Test abort after removing sourcebuffer.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, "open", "readyState is open after init segment appended.");
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ sourceBuffer.appendBuffer(mediaSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(sourceBuffer.buffered.length, 1, "sourceBuffer has a buffered range");
+ assert_equals(mediaSource.readyState, "open", "readyState is open after media segment appended.");
+ test.expectEvent(mediaSource, "sourceended", "source ended");
+ mediaSource.endOfStream();
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, "ended", "readyState is ended.");
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.abort(); },
+ "sourceBuffer.abort() throws an exception for InvalidStateError.");
+ test.done();
+ });
+
+ }, "Test abort after readyState is ended following init segment and media segment.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+ sourceBuffer.appendWindowStart = 1;
+ sourceBuffer.appendWindowEnd = 100;
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ sourceBuffer.abort();
+ assert_equals(sourceBuffer.appendWindowStart, 0, "appendWindowStart is reset to 0");
+ assert_equals(sourceBuffer.appendWindowEnd, Number.POSITIVE_INFINITY,
+ "appendWindowEnd is reset to +INFINITY");
+ test.done();
+ });
+ }, "Test abort after appendBuffer update ends.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE);
+
+ test.expectEvent(sourceBuffer, "updatestart", "Append started.");
+ test.expectEvent(sourceBuffer, "update", "Append success.");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended.");
+
+ assert_throws_js( TypeError,
+ function() { sourceBuffer.appendBuffer(null); },
+ "appendBuffer(null) throws an exception.");
+ test.done();
+ }, "Test appending null.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaSource.removeSourceBuffer(sourceBuffer);
+
+ assert_throws_dom( "InvalidStateError",
+ function() { sourceBuffer.appendBuffer(mediaData); },
+ "appendBuffer() throws an exception when called after removeSourceBuffer().");
+ test.done();
+ }, "Test appending after removeSourceBuffer().");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ // Media elements using MSE should not fire the stalled event. See discussion at
+ // https://github.com/w3c/media-source/issues/88#issuecomment-374406928
+ mediaElement.addEventListener("stalled", test.unreached_func("Unexpected 'stalled' event."));
+
+ // Prime the media element with initial appends.
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ sourceBuffer.appendBuffer(initSegment);
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, "open", "readyState is open after init segment appended.");
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ sourceBuffer.appendBuffer(mediaSegment);
+ });
+
+              // Verify state, then wait long enough to confirm that no 'stalled' event fires.
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(sourceBuffer.buffered.length, 1, "sourceBuffer has a buffered range");
+ assert_equals(mediaSource.readyState, "open", "readyState is open after media segment appended.");
+
+ // Set timeout to 4 seconds. This creates an opportunity for UAs to _improperly_ fire the stalled event.
+                  // For media elements doing progressive download (not MSE), 'stalled' fires after ~3 seconds of the
+ // download failing to progress.
+ test.step_timeout(function() { test.done(); }, 4000);
+ });
+ }, "Test slow appending does not trigger stalled events.");
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-appendbuffer-quota-exceeded.html b/testing/web-platform/tests/media-source/mediasource-appendbuffer-quota-exceeded.html
new file mode 100644
index 0000000000..c90d8448c5
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-appendbuffer-quota-exceeded.html
@@ -0,0 +1,75 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<meta name="timeout" content="long">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+ var subType = MediaSourceUtil.getSubType(MediaSourceUtil.AUDIO_ONLY_TYPE);
+ var manifestFilenameAudio = subType + '/test-a-128k-44100Hz-1ch-manifest.json';
+
+ // Fill up a given SourceBuffer by appending data repeatedly via doAppendDataFunc until
+ // an exception is thrown. The thrown exception is passed to onCaughtExceptionCallback.
+ function fillUpSourceBuffer(test, sourceBuffer, doAppendDataFunc, onCaughtExceptionCallback) {
+ assert_false(sourceBuffer.updating, 'updating should be false before attempting an append operation');
+
+    // We are appending data repeatedly in sequence mode, so there should be no gaps.
+ let sbLength = sourceBuffer.buffered.length;
+ assert_false(sbLength > 1, 'unexpected gap in buffered ranges.');
+
+ let previousBufferedStart = sbLength == 0 ? -Infinity : sourceBuffer.buffered.start(0);
+ let previousBufferedEnd = sbLength == 0 ? -Infinity : sourceBuffer.buffered.end(0);
+ let appendSucceeded = true;
+ try {
+ doAppendDataFunc();
+ } catch(ex) {
+ onCaughtExceptionCallback(ex, previousBufferedStart, previousBufferedEnd);
+ appendSucceeded = false;
+ }
+ if (appendSucceeded) {
+ assert_true(sourceBuffer.updating, 'updating should be true if synchronous portion of appendBuffer succeeded');
+ test.expectEvent(sourceBuffer, 'updateend', 'append ended.');
+ test.waitForExpectedEvents(function() { fillUpSourceBuffer(test, sourceBuffer, doAppendDataFunc, onCaughtExceptionCallback); });
+ }
+ }
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener('error', test.unreached_func('Unexpected media element error event'));
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameAudio, function(typeAudio, dataAudio)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(typeAudio);
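+      // Sequence mode lets repeated appends of the same data keep extending a single
+      // buffered range until the SourceBuffer's quota is exhausted.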
+ sourceBuffer.mode = 'sequence';
+ fillUpSourceBuffer(test, sourceBuffer,
+ function () { // doAppendDataFunc
+ sourceBuffer.appendBuffer(dataAudio);
+ },
+ function (ex, previousBufferedStart, previousBufferedEnd) { // onCaughtExceptionCallback
+ assert_equals(ex.name, 'QuotaExceededError');
+ // Verify that the state looks like appendBuffer stops executing its steps if the prepare append
+ // algorithm aborts after throwing `QuotaExceededError`.
+
+ assert_false(sourceBuffer.updating, 'updating should be false if appendBuffer throws QuotaExceededError');
+
+ sourceBuffer.onupdatestart = test.unreached_func('buffer append, signalled by updatestart, should not be in progress');
+ sourceBuffer.onupdate = test.unreached_func('buffer append, signalled by update, should not be in progress');
+ sourceBuffer.onupdateend = test.unreached_func('buffer append, signalled by updateend, should not be in progress');
+
+ // Ensure the async append was not actually run by letting their event handlers have some time before we proceed.
+ test.step_timeout(function() {
+ // At least the first doAppendDataFunc call shouldn't throw QuotaExceededError, unless the audio
+ // test media itself is too large for one append. If that's the case, then many other tests should
+ // fail and the choice of test media may need to be changed.
+ assert_equals(sourceBuffer.buffered.length, 1, 'test expects precisely one buffered range here');
+ assert_equals(sourceBuffer.buffered.start(0), previousBufferedStart, 'QuotaExceededError should not update buffered media');
+ assert_equals(sourceBuffer.buffered.end(0), previousBufferedEnd, 'QuotaExceededError should not update buffered media');
+
+ // Note, it's difficult to verify that the user agent does not "Add |data| to the end of the |input buffer|" if
+ // the attempted appendBuffer() of that |data| caused QuotaExceededError.
+ test.done();
+ }, 1000 /* 1 second, modifiable by harness multiplier */ );
+ });
+ });
+ }, 'Appending data repeatedly should fill up the buffer and throw a QuotaExceededError when buffer is full.');
+</script>
diff --git a/testing/web-platform/tests/media-source/mediasource-appendwindow.html b/testing/web-platform/tests/media-source/mediasource-appendwindow.html
new file mode 100644
index 0000000000..ba15bd6322
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-appendwindow.html
@@ -0,0 +1,176 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>SourceBuffer.appendWindowStart and SourceBuffer.appendWindowEnd test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_true(sourceBuffer != null, "New SourceBuffer returned");
+
+ sourceBuffer.appendWindowStart = 100.0;
+ sourceBuffer.appendWindowEnd = 500.0;
+              assert_equals(sourceBuffer.appendWindowStart, 100.0, "appendWindowStart is correctly set");
+              assert_equals(sourceBuffer.appendWindowEnd, 500.0, "appendWindowEnd is correctly set");
+
+ sourceBuffer.appendWindowStart = 200.0;
+ sourceBuffer.appendWindowEnd = 400.0;
+              assert_equals(sourceBuffer.appendWindowStart, 200.0, "appendWindowStart is correctly reset");
+              assert_equals(sourceBuffer.appendWindowEnd, 400.0, "appendWindowEnd is correctly reset");
+ test.done();
+ }, "Test correctly reset appendWindowStart and appendWindowEnd values");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_true(sourceBuffer != null, "New SourceBuffer returned");
+ sourceBuffer.appendWindowEnd = 500.0;
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowStart = Number.NEGATIVE_INFINITY; },
+ "set appendWindowStart throws an exception for Number.NEGATIVE_INFINITY.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowStart = Number.POSITIVE_INFINITY; },
+ "set appendWindowStart throws an exception for Number.POSITIVE_INFINITY.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowStart = Number.NaN; },
+ "set appendWindowStart throws an exception for Number.NaN.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowStart = 600.0; },
+ "set appendWindowStart throws an exception when greater than appendWindowEnd.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowStart = sourceBuffer.appendWindowEnd; },
+ "set appendWindowStart throws an exception when equal to appendWindowEnd.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowEnd = sourceBuffer.appendWindowStart; },
+ "set appendWindowEnd throws an exception when equal to appendWindowStart.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowEnd = sourceBuffer.appendWindowStart - 1; },
+ "set appendWindowEnd throws an exception if less than appendWindowStart.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowStart = -100.0; },
+ "set appendWindowStart throws an exception when less than 0.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowEnd = -100.0; },
+ "set appendWindowEnd throws an exception when less than 0.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowEnd = Number.NaN; },
+ "set appendWindowEnd throws an exception if NaN.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowEnd = undefined; },
+ "set appendWindowEnd throws an exception if undefined.");
+
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.appendWindowStart = undefined; },
+ "set appendWindowStart throws an exception if undefined.");
+
+ test.done();
+ }, "Test set wrong values to appendWindowStart and appendWindowEnd.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_true(sourceBuffer != null, "New SourceBuffer returned");
+
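+              // appendWindowStart/appendWindowEnd are IDL (unrestricted) double attributes,
+              // so assigned values are converted with WebIDL ToNumber ("" -> 0, null -> 0, true -> 1).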
+ sourceBuffer.appendWindowStart = "";
+ assert_equals(sourceBuffer.appendWindowStart, 0, "appendWindowStart is 0");
+
+ sourceBuffer.appendWindowStart = "10";
+ assert_equals(sourceBuffer.appendWindowStart, 10, "appendWindowStart is 10");
+
+ sourceBuffer.appendWindowStart = null;
+ assert_equals(sourceBuffer.appendWindowStart, 0, "appendWindowStart is 0");
+
+ sourceBuffer.appendWindowStart = true;
+ assert_equals(sourceBuffer.appendWindowStart, 1, "appendWindowStart is 1");
+
+ sourceBuffer.appendWindowStart = false;
+ assert_equals(sourceBuffer.appendWindowStart, 0, "appendWindowStart is 0");
+
+ sourceBuffer.appendWindowEnd = "100";
+ assert_equals(sourceBuffer.appendWindowEnd, 100, "appendWindowEnd is 100");
+
+ test.done();
+
+ }, "Test set correct values to appendWindowStart and appendWindowEnd.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.appendWindowStart = 100.0; },
+                  "set appendWindowStart throws an exception when the SourceBuffer has been removed from its MediaSource.");
+
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.appendWindowEnd = 500.0; },
+                  "set appendWindowEnd throws an exception when the SourceBuffer has been removed from its MediaSource.");
+ test.done();
+
+          }, "Test that setting appendWindowStart and appendWindowEnd throws an error when the SourceBuffer is not associated with a MediaSource.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, "updateend", "sourceBuffer");
+ sourceBuffer.appendBuffer(mediaData);
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.appendWindowStart = 100.0; },
+ "set appendWindowStart throws an exception when there is a pending append.");
+
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.appendWindowEnd = 500.0; },
+ "set appendWindowEnd throws an exception when there is a pending append.");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test set appendWindowStart and appendWindowEnd when source buffer updating.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, "updateend", "sourceBuffer");
+ sourceBuffer.appendBuffer(mediaData);
+ assert_true(sourceBuffer.updating, "updating attribute is true");
+
+ sourceBuffer.abort();
+ assert_equals(sourceBuffer.appendWindowStart, 0, "appendWindowStart is 0 after an abort");
+ assert_equals(sourceBuffer.appendWindowEnd, Number.POSITIVE_INFINITY,
+ "appendWindowEnd is POSITIVE_INFINITY after an abort");
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.done();
+ });
+ }, "Test appendWindowStart and appendWindowEnd values after a sourceBuffer.abort().");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_equals(sourceBuffer.appendWindowStart, 0, "appendWindowStart is 0 initially");
+ assert_equals(sourceBuffer.appendWindowEnd, Number.POSITIVE_INFINITY,
+ "appendWindowEnd is POSITIVE_INFINITY initially");
+ test.done();
+ }, "Test reading the initial values of appendWindowStart and appendWindowEnd.");
+
+ </script>
+ </body>
+</html>
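
As a rough illustration of the append-window behaviour these tests exercise, here is a minimal standalone sketch (a browser context and a commonly supported WebM MIME type are assumed; neither comes from the patch itself): default values, IDL double coercion on assignment, and the TypeError thrown for invalid values.

    // Minimal sketch, assuming a browser context and WebM support.
    const video = document.createElement("video");
    const mediaSource = new MediaSource();
    video.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener("sourceopen", () => {
      const sb = mediaSource.addSourceBuffer('video/webm; codecs="vp8, vorbis"');
      console.log(sb.appendWindowStart, sb.appendWindowEnd); // 0 Infinity (defaults)
      sb.appendWindowStart = "10";  // IDL coercion: stored as the double 10
      try {
        sb.appendWindowEnd = NaN;   // NaN is rejected by the setter
      } catch (e) {
        console.log(e.name);        // "TypeError"
      }
    }, { once: true });
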
diff --git a/testing/web-platform/tests/media-source/mediasource-attach-stops-delaying-load-event.html b/testing/web-platform/tests/media-source/mediasource-attach-stops-delaying-load-event.html
new file mode 100644
index 0000000000..6b5d5b0ad1
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-attach-stops-delaying-load-event.html
@@ -0,0 +1,49 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<title>Tests that MediaSource attachment stops delaying the load event.</title>
+<link rel="author" title="Matthew Wolenetz" href="mailto:wolenetz@chromium.org"/>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+async_test(function(test)
+{
+ var receivedLoadEvent = false;
+ var receivedSourceOpenEvent = false;
+
+ window.addEventListener("load", test.step_func(function() {
+ assert_false(receivedLoadEvent, "window should not receive multiple load events");
+ receivedLoadEvent = true;
+ assert_equals(document.readyState, "complete", "document should be complete");
+ if (receivedLoadEvent && receivedSourceOpenEvent) {
+ test.done();
+ }
+ }));
+
+ assert_equals(document.readyState, "loading", "document should not be complete yet");
+ var video = document.createElement("video");
+ var mediaSource = new MediaSource();
+
+  // |video| should stop delaying the load event and complete the MediaSource attachment
+  // well before any "progress", "stalled", or "suspend" event is enqueued.
+ video.addEventListener("suspend", test.unreached_func("unexpected 'suspend' event"));
+ video.addEventListener("stalled", test.unreached_func("unexpected 'stalled' event"));
+ video.addEventListener("progress", test.unreached_func("unexpected 'progress' event"));
+
+ // No error is expected.
+ video.addEventListener("error", test.unreached_func("unexpected 'error' event"));
+
+ mediaSource.addEventListener("sourceopen", test.step_func(function() {
+ assert_false(receivedSourceOpenEvent, "only one sourceopen event should occur in this test");
+ receivedSourceOpenEvent = true;
+ assert_equals(video.networkState, video.NETWORK_LOADING);
+ assert_equals(video.readyState, video.HAVE_NOTHING);
+ if (receivedLoadEvent && receivedSourceOpenEvent) {
+ test.done();
+ }
+ }));
+
+ var mediaSourceURL = URL.createObjectURL(mediaSource);
+ video.src = mediaSourceURL;
+ test.add_cleanup(function() { URL.revokeObjectURL(mediaSourceURL); });
+}, "MediaSource attachment should immediately stop delaying the load event");
+</script>
diff --git a/testing/web-platform/tests/media-source/mediasource-avtracks.html b/testing/web-platform/tests/media-source/mediasource-avtracks.html
new file mode 100644
index 0000000000..26ae5bcfe3
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-avtracks.html
@@ -0,0 +1,124 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ sourceBuffer.appendBuffer(initSegment);
+ test.expectEvent(sourceBuffer.audioTracks, "addtrack", "sourceBuffer.audioTracks addtrack event");
+ test.expectEvent(sourceBuffer.videoTracks, "addtrack", "sourceBuffer.videoTracks addtrack event");
+ test.expectEvent(mediaElement.audioTracks, "addtrack", "mediaElement.audioTracks addtrack event");
+ test.expectEvent(mediaElement.videoTracks, "addtrack", "mediaElement.videoTracks addtrack event");
+ test.expectEvent(mediaElement, "loadedmetadata", "loadedmetadata done.");
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(sourceBuffer.videoTracks.length, 1, "videoTracks.length");
+ assert_equals(sourceBuffer.videoTracks[0].kind, "main", "videoTrack.kind");
+ assert_equals(sourceBuffer.videoTracks[0].label, "", "videoTrack.label");
+ assert_equals(sourceBuffer.videoTracks[0].language, "eng", "videoTrack.language");
+ assert_equals(sourceBuffer.videoTracks[0].sourceBuffer, sourceBuffer, "videoTrack.sourceBuffer");
+ // The first video track is selected by default.
+ assert_true(sourceBuffer.videoTracks[0].selected, "sourceBuffer.videoTracks[0].selected");
+
+ assert_equals(sourceBuffer.audioTracks.length, 1, "audioTracks.length");
+ assert_equals(sourceBuffer.audioTracks[0].kind, "main", "audioTrack.kind");
+ assert_equals(sourceBuffer.audioTracks[0].label, "", "audioTrack.label");
+ assert_equals(sourceBuffer.audioTracks[0].language, "eng", "audioTrack.language");
+ assert_equals(sourceBuffer.audioTracks[0].sourceBuffer, sourceBuffer, "audioTrack.sourceBuffer");
+ // The first audio track is enabled by default.
+ assert_true(sourceBuffer.audioTracks[0].enabled, "sourceBuffer.audioTracks[0].enabled");
+
+ assert_not_equals(sourceBuffer.audioTracks[0].id, sourceBuffer.videoTracks[0].id, "track ids must be unique");
+
+ assert_equals(mediaElement.videoTracks.length, 1, "videoTracks.length");
+ assert_equals(mediaElement.videoTracks[0], sourceBuffer.videoTracks[0], "mediaElement.videoTrack == sourceBuffer.videoTrack");
+
+ assert_equals(mediaElement.audioTracks.length, 1, "audioTracks.length");
+ assert_equals(mediaElement.audioTracks[0], sourceBuffer.audioTracks[0], "mediaElement.audioTrack == sourceBuffer.audioTrack");
+
+ test.done();
+ });
+ }, "Check that media tracks and their properties are populated properly");
+
+ function verifyTrackRemoval(test, mediaElement, mediaSource, sourceBuffer, trackRemovalAction, successCallback) {
+ assert_equals(sourceBuffer.audioTracks.length, 1, "audioTracks.length");
+ assert_true(sourceBuffer.audioTracks[0].enabled, "sourceBuffer.audioTracks[0].enabled");
+ assert_equals(sourceBuffer.videoTracks.length, 1, "videoTracks.length");
+ assert_true(sourceBuffer.videoTracks[0].selected, "sourceBuffer.videoTracks[0].selected");
+
+ var audioTrack = sourceBuffer.audioTracks[0];
+ var videoTrack = sourceBuffer.videoTracks[0];
+
+ // Verify removetrack events.
+ test.expectEvent(sourceBuffer.audioTracks, "removetrack", "sourceBuffer.audioTracks removetrack event");
+ test.expectEvent(sourceBuffer.videoTracks, "removetrack", "sourceBuffer.videoTracks removetrack event");
+ test.expectEvent(mediaElement.audioTracks, "removetrack", "mediaElement.audioTracks removetrack event");
+ test.expectEvent(mediaElement.videoTracks, "removetrack", "mediaElement.videoTracks removetrack event");
+
+ // Removing enabled audio track and selected video track should fire "change" events on mediaElement track lists.
+ test.expectEvent(mediaElement.audioTracks, "change", "mediaElement.audioTracks changed.");
+ test.expectEvent(mediaElement.videoTracks, "change", "mediaElement.videoTracks changed.");
+
+ trackRemovalAction();
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.sourceBuffers.length, 0, "mediaSource.sourceBuffers.length");
+ assert_equals(mediaElement.videoTracks.length, 0, "videoTracks.length");
+ assert_equals(mediaElement.audioTracks.length, 0, "audioTracks.length");
+ assert_equals(sourceBuffer.videoTracks.length, 0, "videoTracks.length");
+ assert_equals(sourceBuffer.audioTracks.length, 0, "audioTracks.length");
+ // Since audio and video tracks have been removed, their .sourceBuffer property should be null now.
+ assert_equals(audioTrack.sourceBuffer, null, "audioTrack.sourceBuffer");
+ assert_equals(videoTrack.sourceBuffer, null, "videoTrack.sourceBuffer");
+ test.done();
+ });
+ }
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ sourceBuffer.appendBuffer(initSegment);
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ test.waitForExpectedEvents(function()
+ {
+ verifyTrackRemoval(test, mediaElement, mediaSource, sourceBuffer, test.step_func(function ()
+ {
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ }));
+ });
+ }, "Media tracks must be removed when the SourceBuffer is removed from the MediaSource");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ sourceBuffer.appendBuffer(initSegment);
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ test.waitForExpectedEvents(function()
+ {
+ verifyTrackRemoval(test, mediaElement, mediaSource, sourceBuffer, test.step_func(function ()
+ {
+ mediaElement.src = "";
+ }));
+ });
+ }, "Media tracks must be removed when the HTMLMediaElement.src is changed");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ sourceBuffer.appendBuffer(initSegment);
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ test.waitForExpectedEvents(function()
+ {
+ verifyTrackRemoval(test, mediaElement, mediaSource, sourceBuffer, test.step_func(function ()
+ {
+ mediaElement.load();
+ }));
+ });
+ }, "Media tracks must be removed when HTMLMediaElement.load() is called");
+</script>
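
A minimal sketch of the track-list events the tests above depend on (assuming a browser that exposes AudioTrackList/VideoTrackList on media elements, which may be behind a preference; the MIME type and the "init.webm" URL are placeholders): "addtrack" fires once an appended initialization segment has been parsed, and "removetrack" fires when the SourceBuffer is removed.

    // Minimal sketch; AudioTrackList/VideoTrackList support varies by browser.
    const video = document.createElement("video");
    const mediaSource = new MediaSource();
    video.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener("sourceopen", async () => {
      const sb = mediaSource.addSourceBuffer('video/webm; codecs="vp8, vorbis"');
      video.audioTracks.addEventListener("addtrack", e =>
          console.log("audio track added:", e.track.id, e.track.language));
      video.videoTracks.addEventListener("addtrack", e =>
          console.log("video track added:", e.track.id, e.track.kind));
      // "init.webm" is a placeholder for an initialization segment.
      const init = await (await fetch("init.webm")).arrayBuffer();
      sb.addEventListener("updateend", () => {
        // Removing the SourceBuffer fires "removetrack" on both track lists.
        mediaSource.removeSourceBuffer(sb);
      }, { once: true });
      sb.appendBuffer(init);
    }, { once: true });
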
diff --git a/testing/web-platform/tests/media-source/mediasource-buffered.html b/testing/web-platform/tests/media-source/mediasource-buffered.html
new file mode 100644
index 0000000000..7015fc6b61
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-buffered.html
@@ -0,0 +1,233 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>SourceBuffer.buffered test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ var subType = MediaSourceUtil.getSubType(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ var manifestFilenameA = subType + "/test-a-128k-44100Hz-1ch-manifest.json";
+ var manifestFilenameB = subType + "/test-v-128k-320x240-30fps-10kfr-manifest.json";
+
+ // Audio track expectations
+ var expectationsA = {
+ webm: "{ [0.000, 2.023) }",
+ mp4: "{ [0.000, 2.043) }",
+ };
+
+ // Video track expectations
+ var expectationsB = {
+ webm: "{ [0.000, 2.001) }",
+ mp4: "{ [0.067, 2.067) }",
+ };
+
+ // Audio and Video intersection expectations.
+ // https://w3c.github.io/media-source/index.html#dom-sourcebuffer-buffered
+ // When mediaSource.readyState is "ended", the end time of the last range in each track's buffered ranges is set to the highest end time.
+ var expectationsC = {
+ webm: ["{ [0.000, 2.001) }", "{ [0.000, 2.023) }"],
+ mp4: ["{ [0.067, 2.043) }", "{ [0.067, 2.067) }"]
+ };
+
+ function mediaSourceDemuxedTest(callback, description)
+ {
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.pause();
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ mediaElement.addEventListener("ended", test.step_func_done());
+
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameA, function(typeA, dataA)
+ {
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameB, function(typeB, dataB)
+ {
+ mediaSource.addSourceBuffer(typeA);
+ mediaSource.addSourceBuffer(typeB);
+ assert_equals(mediaSource.sourceBuffers.length, 2);
+
+ callback(test, mediaElement, mediaSource, dataA, dataB);
+ });
+ });
+ }, description);
+ };
+
+ function appendData(test, mediaSource, dataA, dataB, callback)
+ {
+ var sourceBufferA = mediaSource.sourceBuffers[0];
+ var sourceBufferB = mediaSource.sourceBuffers[1];
+
+ test.expectEvent(sourceBufferA, "update");
+ test.expectEvent(sourceBufferA, "updateend");
+ sourceBufferA.appendBuffer(dataA);
+
+ test.expectEvent(sourceBufferB, "update");
+ test.expectEvent(sourceBufferB, "updateend");
+ sourceBufferB.appendBuffer(dataB);
+
+ test.waitForExpectedEvents(function()
+ {
+ callback();
+ });
+ }
+
+ mediaSourceDemuxedTest(function(test, mediaElement, mediaSource, dataA, dataB) {
+ test.expectEvent(mediaElement, "loadedmetadata");
+ appendData(test, mediaSource, dataA, dataB, function()
+ {
+ var expectedBeforeEndOfStreamIntersection = expectationsC[subType][0];
+ var expectedAfterEndOfStreamIntersection = expectationsC[subType][1];
+
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], expectationsA[subType], "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaSource.activeSourceBuffers[1], expectationsB[subType], "mediaSource.activeSourceBuffers[1]");
+ assertBufferedEquals(mediaElement, expectedBeforeEndOfStreamIntersection, "mediaElement.buffered");
+
+ mediaSource.endOfStream();
+
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], expectationsA[subType], "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaSource.activeSourceBuffers[1], expectationsB[subType], "mediaSource.activeSourceBuffers[1]");
+ assertBufferedEquals(mediaElement, expectedAfterEndOfStreamIntersection, "mediaElement.buffered");
+
+ test.done();
+ });
+ }, "Demuxed content with different lengths");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.pause();
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ mediaElement.addEventListener("ended", test.step_func_done());
+
+ MediaSourceUtil.fetchManifestAndData(test, subType + "/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json", function(type, data)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(type);
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.appendBuffer(data);
+
+ test.waitForExpectedEvents(function()
+ {
+ var expectationsAV = {
+ webm: ["{ [0.003, 2.004) }", "{ [0.003, 2.023) }"],
+ mp4: ["{ [0.067, 2.043) }", "{ [0.067, 2.067) }"],
+ };
+
+ var expectedBeforeEndOfStream = expectationsAV[subType][0];
+ var expectedAfterEndOfStream = expectationsAV[subType][1];
+
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], expectedBeforeEndOfStream, "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaElement, expectedBeforeEndOfStream, "mediaElement.buffered");
+
+ mediaSource.endOfStream();
+
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], expectedAfterEndOfStream, "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaElement, expectedAfterEndOfStream, "mediaElement.buffered");
+
+ test.done();
+ });
+ });
+ }, "Muxed content with different lengths");
+
+ mediaSourceDemuxedTest(function(test, mediaElement, mediaSource, dataA, dataB) {
+ var dataBSize = {
+ webm: 318,
+ mp4: 835,
+ };
+ test.expectEvent(mediaElement, "loadedmetadata");
+ appendData(test, mediaSource, dataA, dataB.subarray(0, dataBSize[subType]), function()
+ {
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], expectationsA[subType], "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaSource.activeSourceBuffers[1], "{ }", "mediaSource.activeSourceBuffers[1]");
+ assertBufferedEquals(mediaElement, "{ }", "mediaElement.buffered");
+
+ mediaSource.endOfStream();
+
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], expectationsA[subType], "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaSource.activeSourceBuffers[1], "{ }", "mediaSource.activeSourceBuffers[1]");
+ assertBufferedEquals(mediaElement, "{ }", "mediaElement.buffered");
+
+ test.done();
+ });
+ }, "Demuxed content with an empty buffered range on one SourceBuffer");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.pause();
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ mediaElement.addEventListener("ended", test.step_func_done());
+
+ MediaSourceUtil.fetchManifestAndData(test, subType + "/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json", function(type, data)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(type);
+ test.expectEvent(mediaElement, "loadedmetadata");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.appendBuffer(data.subarray(0, 4052));
+
+ test.waitForExpectedEvents(function()
+ {
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], "{ }", "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaElement, "{ }", "mediaElement.buffered");
+
+ mediaSource.endOfStream();
+
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], "{ }", "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaElement, "{ }", "mediaElement.buffered");
+
+ test.done();
+ });
+ });
+ }, "Muxed content with empty buffered ranges.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.pause();
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ mediaElement.addEventListener("ended", test.step_func_done());
+
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ assertBufferedEquals(mediaSource.sourceBuffers[0], "{ }", "mediaSource.sourceBuffers[0]");
+ assertBufferedEquals(mediaElement, "{ }", "mediaElement.buffered");
+ test.done();
+
+ }, "Get buffered range when the SourceBuffer is empty.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+
+ test.expectEvent(mediaElement, "loadedmetadata");
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ sourceBuffer.appendBuffer(initSegment);
+ test.waitForExpectedEvents(function()
+ {
+ assertBufferedEquals(mediaSource.sourceBuffers[0], "{ }", "mediaSource.sourceBuffers[0]");
+ assertBufferedEquals(mediaSource.activeSourceBuffers[0], "{ }", "mediaSource.activeSourceBuffers[0]");
+ assertBufferedEquals(mediaElement, "{ }", "mediaElement.buffered");
+ test.done();
+ });
+
+ }, "Get buffered range when only init segment is appended.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(mediaSource.sourceBuffers, "removesourcebuffer", "SourceBuffer removed.");
+ mediaSource.removeSourceBuffer(sourceBuffer);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.buffered; },
+ "accessing sourceBuffer.buffered throws an InvalidStateError exception.");
+ test.done();
+ });
+ }, "Get buffered range after removing the SourceBuffer.");
+ </script>
+ </body>
+</html>
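
The expectation strings above describe TimeRanges in a "{ [start, end) }" notation; the actual comparison is done by assertBufferedEquals from mediasource-util.js, which is not part of this patch hunk. A hypothetical formatter along these lines illustrates the notation (the exact separator and precision are assumptions):

    // Hypothetical helper: render a TimeRanges object in the "{ [a, b) }" style
    // used by the expectation strings above, with three decimal places.
    function formatBuffered(ranges) {
      const parts = [];
      for (let i = 0; i < ranges.length; ++i) {
        parts.push("[" + ranges.start(i).toFixed(3) + ", " + ranges.end(i).toFixed(3) + ")");
      }
      return parts.length ? "{ " + parts.join(", ") + " }" : "{ }";
    }
    // e.g. formatBuffered(video.buffered) might return "{ [0.000, 2.023) }".
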
diff --git a/testing/web-platform/tests/media-source/mediasource-changetype-play-implicit.html b/testing/web-platform/tests/media-source/mediasource-changetype-play-implicit.html
new file mode 100644
index 0000000000..c186361e79
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-changetype-play-implicit.html
@@ -0,0 +1,89 @@
+<!DOCTYPE html>
+<!-- Copyright © 2019 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Exercise implicit changeType for supported test types, using mime types WITH and WITHOUT codecs for addSourceBuffer.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-changetype-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+
+// Helper that generates implicit codec switching tests for a pair of media
+// types, with full codecs in the original addSourceBuffer calls, and
+// separately without full codecs parameters in the original addSourceBuffer
+// calls.
+function generateTestsForMediaPair(type1, type2) {
+ // Implicit changeType across bytestream formats is not expected to be
+ // supported, so skip those tests' generation.
+ if (type1.mime_subtype != type2.mime_subtype)
+ return;
+
+ assert_equals(type1.is_video, type2.is_video,
+ "Types must both be audio or both be video");
+  let test_description_prefix = "Test " + (type1.is_video ? "video" : "audio") +
+ "-only implicit changeType for " + type1.type + " <-> " + type2.type;
+
+ mediaSourceChangeTypeTest(
+ type1, type2,
+ test_description_prefix,
+ { implicit_changetype: true } );
+
+ // Skip test generation if the relaxed types are already fully specified and
+ // tested, above.
+ if (type1.type == type1.relaxed_type &&
+ type2.type == type2.relaxed_type) {
+ return;
+ }
+
+ mediaSourceChangeTypeTest(
+ type1, type2,
+ test_description_prefix +
+ " (using types without codecs parameters for addSourceBuffer)",
+ { use_relaxed_mime_types: true, implicit_changetype: true } );
+}
+
+function generateImplicitChangeTypeTests(audio_types, video_types) {
+ async_test((test) => {
+ // Require at least 1 pair of different audio-only or video-only test media
+ // files sharing same bytestream format.
+ assert_true(audio_types.length > 1 || video_types.length > 1,
+ "Browser doesn't support enough test media");
+
+ // Count the number of unique bytestream formats used in each of audio_types
+ // and video_types.
+ let audio_formats = new Set(Array.from(audio_types, t => t.mime_subtype));
+ let video_formats = new Set(Array.from(video_types, t => t.mime_subtype));
+ assert_true(audio_types.length > audio_formats.size ||
+ video_types.length > video_formats.size,
+ "Browser doesn't support at least 2 audio-only or 2 video-only test " +
+ "media with same bytestream format");
+
+ test.done();
+ }, "Check if browser supports enough test media types and pairs of " +
+ "audio-only or video-only media with same bytestream format");
+
+ // Generate audio-only tests
+ for (let audio1 of audio_types) {
+ for (let audio2 of audio_types) {
+ generateTestsForMediaPair(audio1, audio2);
+ }
+ }
+
+ // Generate video-only tests
+ for (let video1 of video_types) {
+ for (let video2 of video_types) {
+ generateTestsForMediaPair(video1, video2);
+ }
+ }
+}
+
+findSupportedChangeTypeTestTypes(generateImplicitChangeTypeTests);
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-changetype-play-negative.html b/testing/web-platform/tests/media-source/mediasource-changetype-play-negative.html
new file mode 100644
index 0000000000..f74e12945a
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-changetype-play-negative.html
@@ -0,0 +1,122 @@
+<!DOCTYPE html>
+<!-- Copyright © 2019 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Exercise scenarios expected to fail for changeType for supported test types, using mime types WITH and WITHOUT codecs.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-changetype-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+
+function generateNegativeChangeTypeTests(audio_types, video_types) {
+ async_test((test) => {
+ assert_true(audio_types.length > 0 && video_types.length > 0,
+ "Browser doesn't support at least one audio and one video test media for audio<->video changeType negative tests");
+
+ let audio_formats = new Set(Array.from(audio_types, t => t.mime_subtype));
+ let video_formats = new Set(Array.from(video_types, t => t.mime_subtype));
+
+ let has_intersected_av_format = false;
+ for (let elem of audio_formats) {
+ if (video_formats.has(elem))
+ has_intersected_av_format = true;
+ }
+ assert_true(has_intersected_av_format,
+        "Browser doesn't support at least 1 audio-only and 1 video-only test media with the same bytestream format");
+
+ test.done();
+ }, "Check if browser supports enough test media types across audio and video for changeType negative tests");
+
+ // Generate audio<->video changeType tests that should not succeed in
+ // reaching successful end of playback because the class of media (audio or
+ // video) must remain the same across either an implicit or explicit
+ // changeType.
+ for (let audio_type of audio_types) {
+ for (let video_type of video_types) {
+ // For implicit changeType negative tests, only pairs of test media files
+ // using the same bytestream format are used, because it is not
+ // guaranteed that all implementations can be expected to reliably detect
+ // an implicit switch of bytestream format (for example, MP3 parsers
+      // might skip invalid input bytes without issuing an error).
+ let do_implicit_changetype = (audio_type.mime_subtype ==
+ video_type.mime_subtype);
+
+ mediaSourceChangeTypeTest(
+ audio_type, video_type,
+ "Negative test audio<->video changeType for " +
+ audio_type.type + " <-> " + video_type.type,
+ { negative_test: true } );
+ mediaSourceChangeTypeTest(
+ video_type, audio_type,
+ "Negative test video<->audio changeType for " +
+ video_type.type + " <-> " + audio_type.type,
+ { negative_test: true } );
+
+ if (do_implicit_changetype) {
+ mediaSourceChangeTypeTest(
+ audio_type, video_type,
+ "Negative test audio<->video implicit changeType for " +
+ audio_type.type + " <-> " + video_type.type,
+ { implicit_changetype: true, negative_test: true } );
+ mediaSourceChangeTypeTest(
+ video_type, audio_type,
+ "Negative test video<->audio implicit changeType for " +
+ video_type.type + " <-> " + audio_type.type,
+ { implicit_changetype: true, negative_test: true } );
+ }
+
+ // Skip tests where the relaxed type is already fully specified and
+ // tested, above.
+ if (audio_type.type == audio_type.relaxed_type &&
+ video_type.type == video_type.relaxed_type) {
+ continue;
+ }
+
+ mediaSourceChangeTypeTest(
+ audio_type, video_type,
+ "Negative test audio<->video changeType for " +
+ audio_type.type + " <-> " + video_type.type +
+ " (using types without codecs parameters)",
+ { use_relaxed_mime_types: true, negative_test: true } );
+ mediaSourceChangeTypeTest(
+ video_type, audio_type,
+ "Negative test video<->audio changeType for " +
+ video_type.type + " <-> " + audio_type.type +
+ " (using types without codecs parameters)",
+ { use_relaxed_mime_types: true, negative_test: true } );
+
+ if (do_implicit_changetype) {
+ mediaSourceChangeTypeTest(
+ audio_type, video_type,
+ "Negative test audio<->video implicit changeType for " +
+ audio_type.type + " <-> " + video_type.type +
+ " (without codecs parameters for addSourceBuffer)",
+ { use_relaxed_mime_types: true,
+ implicit_changetype: true,
+ negative_test: true
+ } );
+
+ mediaSourceChangeTypeTest(
+ video_type, audio_type,
+ "Negative test video<->audio implicit changeType for " +
+ video_type.type + " <-> " + audio_type.type +
+ " (without codecs parameters for addSourceBuffer)",
+ { use_relaxed_mime_types: true,
+ implicit_changetype: true,
+ negative_test: true
+ } );
+ }
+ }
+ }
+}
+
+findSupportedChangeTypeTestTypes(generateNegativeChangeTypeTests);
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-changetype-play-without-codecs-parameter.html b/testing/web-platform/tests/media-source/mediasource-changetype-play-without-codecs-parameter.html
new file mode 100644
index 0000000000..f802b155f7
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-changetype-play-without-codecs-parameter.html
@@ -0,0 +1,52 @@
+<!DOCTYPE html>
+<!-- Copyright © 2019 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Exercise changeType for supported test types, using mime types WITHOUT codecs for addSourceBuffer and changeType.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-changetype-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+
+function generateChangeTypeTests(audio_types, video_types) {
+ async_test((test) => {
+ assert_true(audio_types.length > 1, "Browser doesn't support at least 2 types of audio test media");
+ assert_true(video_types.length > 1, "Browser doesn't support at least 2 types of video test media");
+ test.done();
+ }, "Check if browser supports enough test media types");
+
+ // Generate audio-only changeType tests
+ for (let audio1 of audio_types) {
+ for (let audio2 of audio_types) {
+ mediaSourceChangeTypeTest(
+ audio1, audio2,
+ "Test audio-only changeType for " +
+ audio1.type + " <-> " + audio2.type +
+ " (using types without codecs parameters)",
+ { use_relaxed_mime_types: true } );
+ }
+ }
+
+ // Generate video-only changeType tests
+ for (let video1 of video_types) {
+ for (let video2 of video_types) {
+ mediaSourceChangeTypeTest(
+ video1, video2,
+ "Test video-only changeType for " +
+ video1.type + " <-> " + video2.type +
+ " (using types without codecs parameters)",
+ { use_relaxed_mime_types: true });
+ }
+ }
+}
+
+findSupportedChangeTypeTestTypes(generateChangeTypeTests);
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-changetype-play.html b/testing/web-platform/tests/media-source/mediasource-changetype-play.html
new file mode 100644
index 0000000000..26a67c3270
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-changetype-play.html
@@ -0,0 +1,48 @@
+<!DOCTYPE html>
+<!-- Copyright © 2018 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Exercise changeType for supported test types, using mime types WITH codecs (if applicable) for addSourceBuffer and changeType.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-changetype-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+
+function generateChangeTypeTests(audio_types, video_types) {
+ async_test((test) => {
+ assert_true(audio_types.length > 1, "Browser doesn't support at least 2 types of audio test media");
+ assert_true(video_types.length > 1, "Browser doesn't support at least 2 types of video test media");
+ test.done();
+ }, "Check if browser supports enough test media types");
+
+ // Generate audio-only changeType tests
+ for (let audio1 of audio_types) {
+ for (let audio2 of audio_types) {
+ mediaSourceChangeTypeTest(
+ audio1, audio2,
+ "Test audio-only changeType for " +
+ audio1.type + " <-> " + audio2.type);
+ }
+ }
+
+ // Generate video-only changeType tests
+ for (let video1 of video_types) {
+ for (let video2 of video_types) {
+ mediaSourceChangeTypeTest(
+ video1, video2,
+ "Test video-only changeType for " +
+ video1.type + " <-> " + video2.type);
+ }
+ }
+}
+
+findSupportedChangeTypeTestTypes(generateChangeTypeTests);
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-changetype-util.js b/testing/web-platform/tests/media-source/mediasource-changetype-util.js
new file mode 100644
index 0000000000..024af027ed
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-changetype-util.js
@@ -0,0 +1,359 @@
+// Copyright © 2018 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang).
+
+function findSupportedChangeTypeTestTypes(cb) {
+ // Changetype test media metadata.
+ // type: fully specified mime type (and codecs substring if the bytestream
+ // format does not forbid codecs parameter). This is required for use with
+ // isTypeSupported, and if supported, should work with both addSourceBuffer
+ // and changeType (unless implementation has restrictions).
+ //
+ // relaxed_type: possibly ambiguous mime type/subtype without any codecs
+ // substring. This is the same as type minus any codecs substring.
+ //
+ // mime_subtype: the subtype of the mime type in type and relaxed_type. Across
+ // types registered in the bytestream format registry
+ // (https://www.w3.org/TR/mse-byte-stream-format-registry/), this is
+ // currently sufficient to describe uniquely which test media share the same
+ // bytestream format for use in implicit changeType testing.
+ //
+ // is_video: All test media currently is single track. This describes whether
+ // or not the track is video.
+ //
+ // url: Relative location of the test media file.
+ //
+// The next two items enable more reliable splicing logic in the tests that
+ // prevents buffered range gaps at the splice points.
+ // start_time: Some test media begins at a time later than 0.0 seconds. This
+ // is the start time of the media.
+ // keyframe_interval: Some test media contains out-of-order PTS versus DTS
+ // coded frames. In those cases, a constant keyframe_interval is needed to
+ // prevent severely truncating out-of-order GOPs at splice points.
+ let CHANGE_TYPE_MEDIA_LIST = [
+ {
+ type: 'video/webm; codecs="vp8"',
+ relaxed_type: 'video/webm',
+ mime_subtype: 'webm',
+ is_video: true,
+ url: 'webm/test-v-128k-320x240-24fps-8kfr.webm',
+ start_time: 0.0
+ // keyframe_interval: N/A since DTS==PTS so overlap-removal of
+ // non-keyframe should not produce a buffered range gap.
+ },
+ {
+ type: 'video/webm; codecs="vp9"',
+ relaxed_type: 'video/webm',
+ mime_subtype: 'webm',
+ is_video: true,
+ url: 'webm/test-vp9.webm',
+ start_time: 0.0
+ // keyframe_interval: N/A since DTS==PTS so overlap-removal of
+ // non-keyframe should not produce a buffered range gap.
+ },
+ {
+ type: 'video/mp4; codecs="avc1.4D4001"',
+ relaxed_type: 'video/mp4',
+ mime_subtype: 'mp4',
+ is_video: true,
+ url: 'mp4/test-v-128k-320x240-24fps-8kfr.mp4',
+ start_time: 0.083333,
+ keyframe_interval: 0.333333
+ },
+ {
+ type: 'audio/webm; codecs="vorbis"',
+ relaxed_type: 'audio/webm',
+ mime_subtype: 'webm',
+ is_video: false,
+ url: 'webm/test-a-128k-44100Hz-1ch.webm',
+ start_time: 0.0
+ // keyframe_interval: N/A since DTS==PTS so overlap-removal of
+ // non-keyframe should not produce a buffered range gap. Also, all frames
+ // in this media are key-frames (it is audio).
+ },
+ {
+ type: 'audio/mp4; codecs="mp4a.40.2"',
+ relaxed_type: 'audio/mp4',
+ mime_subtype: 'mp4',
+ is_video: false,
+ url: 'mp4/test-a-128k-44100Hz-1ch.mp4',
+ start_time: 0.0
+ // keyframe_interval: N/A since DTS==PTS so overlap-removal of
+ // non-keyframe should not produce a buffered range gap. Also, all frames
+ // in this media are key-frames (it is audio).
+ },
+ {
+ type: 'audio/mpeg',
+ relaxed_type: 'audio/mpeg',
+ mime_subtype: 'mpeg',
+ is_video: false,
+ url: 'mp3/sound_5.mp3',
+ start_time: 0.0
+ // keyframe_interval: N/A since DTS==PTS so overlap-removal of
+ // non-keyframe should not produce a buffered range gap. Also, all frames
+ // in this media are key-frames (it is audio).
+ }
+ ];
+
+ let audio_result = [];
+ let video_result = [];
+
+ for (let i = 0; i < CHANGE_TYPE_MEDIA_LIST.length; ++i) {
+ let media = CHANGE_TYPE_MEDIA_LIST[i];
+ if (window.MediaSource && MediaSource.isTypeSupported(media.type)) {
+ if (media.is_video === true) {
+ video_result.push(media);
+ } else {
+ audio_result.push(media);
+ }
+ }
+ }
+
+ cb(audio_result, video_result);
+}
+
+function appendBuffer(test, sourceBuffer, data) {
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.appendBuffer(data);
+}
+
+function trimBuffered(test, mediaSource, sourceBuffer, minimumPreviousDuration, newDuration, skip_duration_prechecks) {
+ if (!skip_duration_prechecks) {
+ assert_less_than(newDuration, minimumPreviousDuration,
+ "trimBuffered newDuration must be less than minimumPreviousDuration");
+ assert_less_than(minimumPreviousDuration, mediaSource.duration,
+ "trimBuffered minimumPreviousDuration must be less than mediaSource.duration");
+ }
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.remove(newDuration, Infinity);
+}
+
+function trimDuration(test, mediaElement, mediaSource, newDuration, skip_duration_prechecks) {
+ if (!skip_duration_prechecks) {
+ assert_less_than(newDuration, mediaSource.duration,
+ "trimDuration newDuration must be less than mediaSource.duration");
+ }
+ test.expectEvent(mediaElement, "durationchange");
+ mediaSource.duration = newDuration;
+}
+
+function runChangeTypeTest(test, mediaElement, mediaSource, metadataA, typeA, dataA, metadataB, typeB, dataB,
+ implicit_changetype, negative_test) {
+ // Some streams, like the MP4 video stream, contain presentation times for
+ // frames out of order versus their decode times. If we overlap-append the
+ // latter part of such a stream's GOP presentation interval, a significant
+ // portion of decode-dependent non-keyframes with earlier presentation
+ // intervals could be removed and a presentation time buffered range gap could
+ // be introduced. Therefore, we test overlap appends with the overlaps
+ // occurring very near to a keyframe's presentation time to reduce the
+ // possibility of such a gap. None of the test media is SAP-Type-2, so we
+ // don't take any extra care to avoid gaps that may occur when
+ // splice-overlapping such GOP sequences that aren't SAP-Type-1.
+ // TODO(wolenetz): https://github.com/w3c/media-source/issues/160 could
+  // greatly simplify this problem by allowing us to play through these small gaps.
+ //
+ // typeA and typeB may be underspecified for use with isTypeSupported, but
+ // this helper does not use isTypeSupported. typeA and typeB must work (even
+ // if missing codec specific substrings) with addSourceBuffer (just typeA) and
+ // changeType (both typeA and typeB).
+ //
+ // See also mediaSourceChangeTypeTest's options argument for the meanings of
+ // implicit_changetype and negative_test.
+
+ function findSafeOffset(targetTime, overlappedMediaMetadata, overlappedStartTime, overlappingMediaMetadata) {
+ assert_greater_than_equal(targetTime, overlappedStartTime,
+ "findSafeOffset targetTime must be greater than or equal to overlappedStartTime");
+
+ let offset = targetTime;
+ if ("start_time" in overlappingMediaMetadata) {
+ offset -= overlappingMediaMetadata["start_time"];
+ }
+
+ // If the media being overlapped is not out-of-order decode, then we can
+ // safely use the supplied times.
+ if (!("keyframe_interval" in overlappedMediaMetadata)) {
+ return { "offset": offset, "adjustedTime": targetTime };
+ }
+
+ // Otherwise, we're overlapping media that needs care to prevent introducing
+ // a gap. Adjust offset and adjustedTime to make the overlapping media start
+ // at the next overlapped media keyframe at or after targetTime.
+ let gopsToRetain = Math.ceil((targetTime - overlappedStartTime) / overlappedMediaMetadata["keyframe_interval"]);
+ let adjustedTime = overlappedStartTime + gopsToRetain * overlappedMediaMetadata["keyframe_interval"];
+
+ assert_greater_than_equal(adjustedTime, targetTime,
+ "findSafeOffset adjustedTime must be greater than or equal to targetTime");
+ offset += adjustedTime - targetTime;
+ return { "offset": offset, "adjustedTime": adjustedTime };
+ }
+
+ // Note, none of the current negative changeType tests should fail the initial addSourceBuffer.
+ let sourceBuffer = mediaSource.addSourceBuffer(typeA);
+
+ // Add error event listeners to sourceBuffer. The caller of this helper may
+ // also have installed error event listeners on mediaElement.
+ if (negative_test) {
+ sourceBuffer.addEventListener("error", test.step_func_done());
+ } else {
+ sourceBuffer.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ }
+
+ // In either negative test or not, the first appendBuffer should succeed.
+ appendBuffer(test, sourceBuffer, dataA);
+ let lastStart = metadataA["start_time"];
+ if (lastStart == null) {
+ lastStart = 0.0;
+ }
+
+ // changeType A->B and append the first media of B effectively at 0.5 seconds
+ // (or at the first keyframe in A at or after 0.5 seconds if it has
+ // keyframe_interval defined).
+ test.waitForExpectedEvents(() => {
+ let safeOffset = findSafeOffset(0.5, metadataA, lastStart, metadataB);
+ lastStart = safeOffset["adjustedTime"];
+ if (!implicit_changetype) {
+ try { sourceBuffer.changeType(typeB); } catch(err) {
+ if (negative_test)
+ test.done();
+ else
+ throw err;
+ }
+ }
+ sourceBuffer.timestampOffset = safeOffset["offset"];
+ appendBuffer(test, sourceBuffer, dataB);
+ });
+
+ // changeType B->B and append B starting at 1.0 seconds (or at the first
+ // keyframe in B at or after 1.0 seconds if it has keyframe_interval defined).
+ test.waitForExpectedEvents(() => {
+ assert_less_than(lastStart, 1.0,
+ "changeType B->B lastStart must be less than 1.0");
+ let safeOffset = findSafeOffset(1.0, metadataB, lastStart, metadataB);
+ lastStart = safeOffset["adjustedTime"];
+ if (!implicit_changetype) {
+ try { sourceBuffer.changeType(typeB); } catch(err) {
+ if (negative_test)
+ test.done();
+ else
+ throw err;
+ }
+ }
+ sourceBuffer.timestampOffset = safeOffset["offset"];
+ appendBuffer(test, sourceBuffer, dataB);
+ });
+
+ // changeType B->A and append A starting at 1.5 seconds (or at the first
+ // keyframe in B at or after 1.5 seconds if it has keyframe_interval defined).
+ test.waitForExpectedEvents(() => {
+ assert_less_than(lastStart, 1.5,
+ "changeType B->A lastStart must be less than 1.5");
+ let safeOffset = findSafeOffset(1.5, metadataB, lastStart, metadataA);
+ // Retain the previous lastStart because the next block will append data
+ // which begins between that start time and this block's start time.
+ if (!implicit_changetype) {
+ try { sourceBuffer.changeType(typeA); } catch(err) {
+ if (negative_test)
+ test.done();
+ else
+ throw err;
+ }
+ }
+ sourceBuffer.timestampOffset = safeOffset["offset"];
+ appendBuffer(test, sourceBuffer, dataA);
+ });
+
+ // changeType A->A and append A starting at 1.3 seconds (or at the first
+ // keyframe in B at or after 1.3 seconds if it has keyframe_interval defined).
+ test.waitForExpectedEvents(() => {
+ assert_less_than(lastStart, 1.3,
+ "changeType A->A lastStart must be less than 1.3");
+ // Our next append will begin by overlapping some of metadataB, then some of
+ // metadataA.
+ let safeOffset = findSafeOffset(1.3, metadataB, lastStart, metadataA);
+ if (!implicit_changetype) {
+ try { sourceBuffer.changeType(typeA); } catch(err) {
+ if (negative_test)
+ test.done();
+ else
+ throw err;
+ }
+ }
+ sourceBuffer.timestampOffset = safeOffset["offset"];
+ appendBuffer(test, sourceBuffer, dataA);
+ });
+
+ // Trim duration to 2 seconds, then play through to end.
+ test.waitForExpectedEvents(() => {
+ // If negative testing, then skip fragile assertions.
+ trimBuffered(test, mediaSource, sourceBuffer, 2.1, 2, negative_test);
+ });
+
+ test.waitForExpectedEvents(() => {
+ // If negative testing, then skip fragile assertions.
+ trimDuration(test, mediaElement, mediaSource, 2, negative_test);
+ });
+
+ test.waitForExpectedEvents(() => {
+ assert_equals(mediaElement.currentTime, 0, "currentTime must be 0");
+ test.expectEvent(mediaSource, "sourceended");
+ test.expectEvent(mediaElement, "play");
+ test.expectEvent(mediaElement, "ended");
+ mediaSource.endOfStream();
+ mediaElement.play();
+ });
+
+ test.waitForExpectedEvents(() => {
+ if (negative_test)
+ assert_unreached("Received 'ended' while negative testing.");
+ else
+ test.done();
+ });
+}
+
+// options.use_relaxed_mime_types : boolean (defaults to false).
+// If true, the initial addSourceBuffer and any changeType calls will use the
+// relaxed_type in metadataA and metadataB instead of the full type in the
+// metadata.
+// options.implicit_changetype : boolean (defaults to false).
+// If true, no changeType calls will be used. Instead, the test media files
+// are expected to begin with an initialization segment and end at a segment
+// boundary (no abort() call is issued by this test to reset the
+// SourceBuffer's parser).
+// options.negative_test : boolean (defaults to false).
+//   If true, the test is expected to hit an error in one of the following
+//   areas: addSourceBuffer, appendBuffer (synchronous or asynchronous error),
+//   changeType, or playback to the end of buffered media. If 'ended' is
+//   received without an error having already occurred, the test fails;
+//   otherwise, the test passes on receipt of the error. Timeouts are still
+//   considered test failures.
+function mediaSourceChangeTypeTest(metadataA, metadataB, description, options = {}) {
+ mediasource_test((test, mediaElement, mediaSource) => {
+ let typeA = metadataA.type;
+ let typeB = metadataB.type;
+ if (options.hasOwnProperty("use_relaxed_mime_types") &&
+ options.use_relaxed_mime_types === true) {
+ typeA = metadataA.relaxed_type;
+ typeB = metadataB.relaxed_type;
+ }
+ let implicit_changetype = options.hasOwnProperty("implicit_changetype") &&
+ options.implicit_changetype === true;
+ let negative_test = options.hasOwnProperty("negative_test") &&
+ options.negative_test === true;
+
+ mediaElement.pause();
+ if (negative_test) {
+ mediaElement.addEventListener("error", test.step_func_done());
+ } else {
+ mediaElement.addEventListener("error",
+ test.unreached_func("Unexpected event 'error'"));
+ }
+ MediaSourceUtil.loadBinaryData(test, metadataA.url, (dataA) => {
+ MediaSourceUtil.loadBinaryData(test, metadataB.url, (dataB) => {
+ runChangeTypeTest(
+ test, mediaElement, mediaSource,
+ metadataA, typeA, dataA, metadataB, typeB, dataB,
+ implicit_changetype, negative_test);
+ });
+ });
+ }, description);
+}
diff --git a/testing/web-platform/tests/media-source/mediasource-changetype.html b/testing/web-platform/tests/media-source/mediasource-changetype.html
new file mode 100644
index 0000000000..25618cdc1e
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-changetype.html
@@ -0,0 +1,149 @@
+<!DOCTYPE html>
+<!-- Copyright © 2018 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <title>SourceBuffer.changeType() test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ assert_throws_js(TypeError, function()
+ {
+ sourceBuffer.changeType("");
+ }, "changeType");
+
+ test.done();
+}, "Test changeType with an empty type.");
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.removeSourceBuffer(sourceBuffer);
+
+ assert_throws_dom("InvalidStateError", function()
+ {
+ sourceBuffer.changeType(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ }, "changeType");
+
+ test.done();
+}, "Test changeType after SourceBuffer removed from mediaSource.");
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ sourceBuffer.appendBuffer(new Uint8Array(0));
+ assert_true(sourceBuffer.updating, "Updating flag set when a buffer is appended.");
+
+ assert_throws_dom("InvalidStateError", function()
+ {
+ sourceBuffer.changeType(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ }, "changeType");
+
+ test.done();
+}, "Test changeType while update pending.");
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ var unsupported_type = null;
+ assert_false(MediaSource.isTypeSupported(unsupported_type), "null MIME type is not expected to be supported.");
+
+ assert_throws_dom("NotSupportedError", function()
+ {
+ sourceBuffer.changeType(unsupported_type);
+ }, "changeType");
+
+ test.done();
+}, "Test changeType with null type.");
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ var unsupported_type = 'invalidType';
+ assert_false(MediaSource.isTypeSupported(unsupported_type), unsupported_type + " is not expected to be supported.");
+
+ assert_throws_dom("NotSupportedError", function()
+ {
+ sourceBuffer.changeType(unsupported_type);
+ }, "changeType");
+
+ test.done();
+}, "Test changeType with unsupported type.");
+
+mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+{
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.endOfStream();
+ assert_equals(mediaSource.readyState, "ended");
+
+ test.expectEvent(mediaSource, "sourceopen");
+ sourceBuffer.changeType(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_equals(mediaSource.readyState, "open");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ test.done();
+ });
+}, "Test changeType transitioning readyState from 'ended' to 'open'.");
+
+mediasource_test(function(test, mediaElement, mediaSource) {
+ var sequenceType = "audio/aac";
+ if (!MediaSource.isTypeSupported(sequenceType)) {
+ sequenceType = "audio/mpeg";
+ assert_true(MediaSource.isTypeSupported(sequenceType),
+ "No bytestream that generates timestamps is supported, aborting test");
+ }
+
+ assert_not_equals(MediaSourceUtil.AUDIO_ONLY_TYPE, sequenceType,
+ "This test requires distinct audio-only types");
+
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+ assert_equals(sourceBuffer.mode, "segments",
+ "None of the audioOnlyTypes in the test util generate timestamps, but mode is incorrectly set");
+
+ sourceBuffer.changeType(sequenceType);
+ assert_equals(sourceBuffer.mode, "sequence",
+ "Mode is not updated correctly for a bytestream that generates timestamps");
+
+ test.done();
+}, "Test changeType sets mode to sequence for change to type that generates timestamps");
+
+mediasource_test(function(test, mediaElement, mediaSource) {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ assert_equals(sourceBuffer.mode, "segments",
+ "None of the audioOnlyTypes in the test util generate timestamps, but mode is incorrectly set");
+ sourceBuffer.changeType(MediaSourceUtil.AUDIO_ONLY_TYPE);
+ assert_equals(sourceBuffer.mode, "segments",
+ "Previous segments mode is not retained correctly for changeType to one that doesn't generate timestamps");
+
+ sourceBuffer.mode = "sequence";
+ assert_equals(sourceBuffer.mode, "sequence", "mode should be sequence now");
+ sourceBuffer.changeType(MediaSourceUtil.AUDIO_ONLY_TYPE);
+ assert_equals(sourceBuffer.mode, "sequence",
+ "Previous sequence mode is not retained correctly for changeType to one that doesn't generate timestamps");
+
+ test.done();
+}, "Test changeType retains previous mode when changing to type that doesn't generate timestamps");
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-closed.html b/testing/web-platform/tests/media-source/mediasource-closed.html
new file mode 100644
index 0000000000..79d522f2f9
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-closed.html
@@ -0,0 +1,140 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MediaSource.readyState equals "closed" test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ test(function ()
+ {
+ var mediaSource = new MediaSource();
+ assert_equals(mediaSource.sourceBuffers.length, 0, "sourceBuffers is empty");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "activeSourceBuffers is empty");
+ assert_equals(mediaSource.readyState, "closed", "readyState is 'closed'");
+ assert_true(isNaN(mediaSource.duration), "duration is NaN");
+ }, "Test attribute values on a closed MediaSource object.");
+
+ test(function ()
+ {
+ var mediaSource = new MediaSource();
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE); },
+ "addSourceBuffer() throws an exception when closed.");
+ }, "Test addSourceBuffer() while closed.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ // Setup a handler to run when the MediaSource closes.
+ mediaSource.addEventListener('sourceclose', test.step_func(function (event)
+ {
+ assert_equals(mediaSource.sourceBuffers.length, 0, "sourceBuffers is empty");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "activeSourceBuffers is empty");
+ assert_equals(mediaSource.readyState, "closed", "readyState is 'closed'");
+ assert_throws_dom("NotFoundError",
+ function() { mediaSource.removeSourceBuffer(sourceBuffer); },
+ "removeSourceBuffer() throws an exception when closed.");
+ test.done();
+ }));
+
+ // Trigger the MediaSource to close.
+ mediaElement.src = "";
+ }, "Test removeSourceBuffer() while closed.");
+
+ test(function ()
+ {
+ var mediaSource = new MediaSource();
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.endOfStream(); },
+ "endOfStream() throws an exception when closed.");
+ }, "Test endOfStream() while closed.");
+
+ test(function ()
+ {
+ var mediaSource = new MediaSource();
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.endOfStream("decode"); },
+ "endOfStream(decode) throws an exception when closed.");
+ }, "Test endOfStream(decode) while closed.");
+
+ test(function ()
+ {
+ var mediaSource = new MediaSource();
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.endOfStream("network"); },
+ "endOfStream(network) throws an exception when closed.");
+ }, "Test endOfStream(network) while closed.");
+
+ test(function ()
+ {
+ var mediaSource = new MediaSource();
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.duration = 10; },
+ "Setting duration throws an exception when closed.");
+ }, "Test setting duration while closed.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ assert_equals(mediaSource.readyState, "open", "readyState is 'open'");
+ // Setup a handler to run when the MediaSource closes.
+ mediaSource.addEventListener("sourceclose", test.step_func(function (event)
+ {
+ assert_equals(mediaSource.readyState, "closed", "readyState is 'closed'");
+ assert_throws_dom("InvalidStateError",
+ function() { mediaSource.duration = 10; },
+ "Setting duration when closed throws an exception");
+ test.done();
+ }));
+
+ // Trigger the MediaSource to close.
+ mediaElement.src = "";
+ }, "Test setting duration while open->closed.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ assert_equals(mediaSource.readyState, "open", "readyState is 'open'");
+        // Set up a handler to run when the MediaSource closes.
+ mediaSource.addEventListener("sourceclose", test.step_func(function (event)
+ {
+ assert_equals(mediaSource.readyState, "closed", "readyState is 'closed'");
+ assert_true(isNaN(mediaSource.duration), "duration is NaN");
+ test.done();
+ }));
+
+ // Trigger the MediaSource to close.
+ mediaElement.src = "";
+ }, "Test getting duration while open->closed.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ assert_equals(mediaSource.readyState, "open", "readyState is open");
+
+        // Set up a handler to run when the MediaSource closes.
+ mediaSource.addEventListener("sourceclose", test.step_func(function (event)
+ {
+ assert_equals(mediaSource.readyState, "closed", "readyState is closed");
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.abort(); },
+ "sourceBuffer.abort() throws INVALID_STATE_ERROR");
+ test.done();
+ }));
+
+ // Trigger the MediaSource to close.
+ mediaElement.src = "";
+ }, "Test sourcebuffer.abort when closed.");
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-mp4-a-bitrate.html b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-a-bitrate.html
new file mode 100644
index 0000000000..47e4c804ee
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-a-bitrate.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MP4 audio-only bitrate change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("mp4", "a-128k-44100Hz-1ch", "a-192k-44100Hz-1ch", "Tests mp4 audio-only bitrate changes.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-audio-bitrate.html b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-audio-bitrate.html
new file mode 100644
index 0000000000..960720768b
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-audio-bitrate.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MP4 muxed audio &amp; video with an audio bitrate change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("mp4", "av-384k-44100Hz-1ch-640x480-30fps-10kfr", "av-448k-44100Hz-1ch-640x480-30fps-10kfr", "Tests mp4 audio bitrate changes in multiplexed content.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-framesize.html b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-framesize.html
new file mode 100644
index 0000000000..7ef2bb0bbf
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-framesize.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MP4 muxed audio &amp; video with a video frame size change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("mp4", "av-384k-44100Hz-1ch-320x240-30fps-10kfr", "av-384k-44100Hz-1ch-640x480-30fps-10kfr", "Tests mp4 frame size changes in multiplexed content.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-video-bitrate.html b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-video-bitrate.html
new file mode 100644
index 0000000000..8c74e75389
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-av-video-bitrate.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MP4 muxed audio &amp; video with a video bitrate change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("mp4", "av-384k-44100Hz-1ch-640x480-30fps-10kfr", "av-640k-44100Hz-1ch-640x480-30fps-10kfr", "Tests mp4 video bitrate changes in multiplexed content.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-bitrate.html b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-bitrate.html
new file mode 100644
index 0000000000..705c5bd3ca
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-bitrate.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MP4 video-only bitrate change.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("mp4", "v-128k-320x240-30fps-10kfr", "v-256k-320x240-30fps-10kfr", "Tests mp4 video-only bitrate changes.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-framerate.html b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-framerate.html
new file mode 100644
index 0000000000..1d07fa9482
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-framerate.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MP4 video-only frame rate change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("mp4", "v-128k-320x240-24fps-8kfr", "v-128k-320x240-30fps-10kfr", "Tests mp4 video-only frame rate changes.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-framesize.html b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-framesize.html
new file mode 100644
index 0000000000..78e6823e23
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-mp4-v-framesize.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MP4 video-only frame size change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("mp4", "v-128k-320x240-30fps-10kfr", "v-128k-640x480-30fps-10kfr", "Tests mp4 video-only frame size changes.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-webm-a-bitrate.html b/testing/web-platform/tests/media-source/mediasource-config-change-webm-a-bitrate.html
new file mode 100644
index 0000000000..cc351cd307
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-webm-a-bitrate.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>WebM audio-only bitrate change.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("webm", "a-128k-44100Hz-1ch", "a-192k-44100Hz-1ch", "Tests webm audio-only bitrate changes.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-audio-bitrate.html b/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-audio-bitrate.html
new file mode 100644
index 0000000000..d98069d072
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-audio-bitrate.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>WebM muxed audio &amp; video with an audio bitrate change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("webm", "av-384k-44100Hz-1ch-640x480-30fps-10kfr", "av-448k-44100Hz-1ch-640x480-30fps-10kfr", "Tests webm audio bitrate changes in multiplexed content.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-framesize.html b/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-framesize.html
new file mode 100644
index 0000000000..c37f8c2ed4
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-framesize.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>WebM muxed audio &amp; video with a video frame size change.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("webm", "av-384k-44100Hz-1ch-320x240-30fps-10kfr", "av-384k-44100Hz-1ch-640x480-30fps-10kfr", "Tests webm frame size changes in multiplexed content.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-video-bitrate.html b/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-video-bitrate.html
new file mode 100644
index 0000000000..96037c736a
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-webm-av-video-bitrate.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>WebM muxed audio &amp; video with a video bitrate change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("webm", "av-384k-44100Hz-1ch-640x480-30fps-10kfr", "av-640k-44100Hz-1ch-640x480-30fps-10kfr", "Tests webm video bitrate changes in multiplexed content.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-bitrate.html b/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-bitrate.html
new file mode 100644
index 0000000000..e194e267d2
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-bitrate.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>WebM video-only bitrate change.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("webm", "v-128k-320x240-30fps-10kfr", "v-256k-320x240-30fps-10kfr", "Tests webm video-only bitrate changes.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-framerate.html b/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-framerate.html
new file mode 100644
index 0000000000..7dbdc9c802
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-framerate.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>WebM video-only frame rate change.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("webm", "v-128k-320x240-24fps-8kfr", "v-128k-320x240-30fps-10kfr", "Tests webm video-only frame rate changes.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-framesize.html b/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-framesize.html
new file mode 100644
index 0000000000..ca13c78a35
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-change-webm-v-framesize.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>WebM video-only frame size change.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ <script src="mediasource-config-changes.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediaSourceConfigChangeTest("webm", "v-128k-320x240-30fps-10kfr", "v-128k-640x480-30fps-10kfr", "Tests webm video-only frame size changes.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-config-changes.js b/testing/web-platform/tests/media-source/mediasource-config-changes.js
new file mode 100644
index 0000000000..b28aa90f1f
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-config-changes.js
@@ -0,0 +1,116 @@
+// Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang).
+
+// Extract & return the resolution string from a filename, if any.
+function resolutionFromFilename(filename)
+{
+    var resolution = filename.replace(/^.*[^0-9]([0-9]+x[0-9]+)[^0-9].*$/, "$1");
+ if (resolution != filename) {
+ return resolution;
+ }
+ return "";
+}
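+
+// For illustration only (using the manifest names constructed by the tests above):
+// resolutionFromFilename("webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json")
+// returns "640x480", while an audio-only name such as
+// "webm/test-a-128k-44100Hz-1ch-manifest.json" contains no WxH token, so the
+// replace() leaves the string unchanged and the function returns "".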
+
+function appendBuffer(test, sourceBuffer, data)
+{
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.appendBuffer(data);
+}
+
+function mediaSourceConfigChangeTest(directory, idA, idB, description)
+{
+ var manifestFilenameA = directory + "/test-" + idA + "-manifest.json";
+ var manifestFilenameB = directory + "/test-" + idB + "-manifest.json";
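+    // For example, directory "webm" with idA "a-128k-44100Hz-1ch" resolves to
+    // "webm/test-a-128k-44100Hz-1ch-manifest.json" (the manifest naming scheme
+    // these tests assume).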
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.pause();
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ var expectResizeEvents = resolutionFromFilename(manifestFilenameA) != resolutionFromFilename(manifestFilenameB);
+ var expectedResizeEventCount = 0;
+
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameA, function(typeA, dataA)
+ {
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameB, function(typeB, dataB)
+ {
+ assert_equals(typeA, typeB, "Media format types match");
+
+ var sourceBuffer = mediaSource.addSourceBuffer(typeA);
+
+ appendBuffer(test, sourceBuffer, dataA);
+ ++expectedResizeEventCount;
+
+ test.waitForExpectedEvents(function()
+ {
+                // Add the second buffer starting at 0.5 seconds.
+ sourceBuffer.timestampOffset = 0.5;
+ appendBuffer(test, sourceBuffer, dataB);
+ ++expectedResizeEventCount;
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ // Add the first buffer starting at 1 second.
+ sourceBuffer.timestampOffset = 1;
+ appendBuffer(test, sourceBuffer, dataA);
+ ++expectedResizeEventCount;
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+                // Add the second buffer starting at 1.5 seconds.
+ sourceBuffer.timestampOffset = 1.5;
+ appendBuffer(test, sourceBuffer, dataB);
+ ++expectedResizeEventCount;
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating");
+
+ // Truncate the presentation to a duration of 2 seconds.
+ // First, explicitly remove the media beyond 2 seconds.
+ sourceBuffer.remove(2, Infinity);
+
+ assert_true(sourceBuffer.updating, "sourceBuffer.updating during range removal");
+ test.expectEvent(sourceBuffer, 'updatestart', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'update', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "sourceBuffer.updating prior to duration reduction");
+ assert_greater_than(mediaSource.duration, 2, "duration");
+
+                // Complete the truncation of the presentation to a
+                // 2-second duration.
+ mediaSource.duration = 2;
+ assert_false(sourceBuffer.updating, "sourceBuffer.updating synchronously after duration reduction");
+
+ test.expectEvent(mediaElement, "durationchange");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating");
+
+ mediaSource.endOfStream();
+
+ assert_false(sourceBuffer.updating, "updating");
+
+ if (expectResizeEvents) {
+ for (var i = 0; i < expectedResizeEventCount; ++i) {
+ test.expectEvent(mediaElement, "resize");
+ }
+ }
+ test.expectEvent(mediaElement, "ended");
+ mediaElement.play();
+ });
+
+ test.waitForExpectedEvents(function() {
+ test.done();
+ });
+ });
+ });
+ }, description);
+};
diff --git a/testing/web-platform/tests/media-source/mediasource-correct-frames-after-reappend.html b/testing/web-platform/tests/media-source/mediasource-correct-frames-after-reappend.html
new file mode 100644
index 0000000000..5c0f2e1119
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-correct-frames-after-reappend.html
@@ -0,0 +1,162 @@
+<!DOCTYPE html>
+<!-- Copyright © 2019 Igalia. -->
+<html>
+<head>
+    <title>Frame checking test for MSE playback in the presence of a reappend.</title>
+ <meta name="timeout" content="long">
+ <meta name="charset" content="UTF-8">
+ <link rel="author" title="Alicia Boya García" href="mailto:aboya@igalia.com">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<canvas id="test-canvas"></canvas>
+<script>
+ function waitForEventPromise(element, event) {
+ return new Promise(resolve => {
+ function handler(ev) {
+ element.removeEventListener(event, handler);
+ resolve(ev);
+ }
+ element.addEventListener(event, handler);
+ });
+ }
+
+ function appendBufferPromise(sourceBuffer, data) {
+ sourceBuffer.appendBuffer(data);
+ return waitForEventPromise(sourceBuffer, "update");
+ }
+
+ function waitForPlayerToReachTimePromise(mediaElement, time) {
+ return new Promise(resolve => {
+ function timeupdate() {
+ if (mediaElement.currentTime < time)
+ return;
+
+ mediaElement.removeEventListener("timeupdate", timeupdate);
+ resolve();
+ }
+ mediaElement.addEventListener("timeupdate", timeupdate);
+ });
+ }
+
+ function readPixel(imageData, x, y) {
+ return {
+ r: imageData.data[4 * (y * imageData.width + x)],
+ g: imageData.data[1 + 4 * (y * imageData.width + x)],
+ b: imageData.data[2 + 4 * (y * imageData.width + x)],
+ a: imageData.data[3 + 4 * (y * imageData.width + x)],
+ };
+ }
+
+ function isPixelLit(pixel) {
+ const threshold = 200; // out of 255
+ return pixel.r >= threshold && pixel.g >= threshold && pixel.b >= threshold;
+ }
+
+    // The test video has a few gray boxes. Every box interval (1 second), a new box is lit white and a different
+    // note is played. This test makes sure the right number of boxes is lit and the right note is played at the right time.
+ const totalBoxes = 7;
+ const boxInterval = 1; // seconds
+
+ const videoWidth = 320;
+ const videoHeight = 240;
+ const boxesY = 210;
+ const boxSide = 20;
+ const boxMargin = 20;
+ const allBoxesWidth = totalBoxes * boxSide + (totalBoxes - 1) * boxMargin;
+ const boxesX = new Array(totalBoxes).fill(undefined)
+ .map((_, i) => (videoWidth - allBoxesWidth) / 2 + boxSide / 2 + i * (boxSide + boxMargin));
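+    // A quick worked check of the layout above (illustrative only): allBoxesWidth = 7*20 + 6*20 = 260,
+    // so the sampled box centers fall at x = 40, 80, 120, ..., 280 with y = 210.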
+
+ // Sound starts playing A4 (440 Hz) and goes one chromatic note up with every box lit.
+    // By comparing the player position to both the number of boxes lit and the note played, we can detect A/V
+ // synchronization issues automatically.
+ const noteFrequencies = new Array(1 + totalBoxes).fill(undefined)
+ .map((_, i) => 440 * Math.pow(Math.pow(2, 1 / 12), i));
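+    // For reference (not used by the test): index 0 is A4 at 440.0 Hz, index 1 is one semitone
+    // higher at 440 * 2^(1/12) ≈ 466.2 Hz, and index 7 is 440 * 2^(7/12) ≈ 659.3 Hz (E5).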
+
+    // We also check the first second [0, 1), where no boxes are lit, so we start counting at -1 to cover the check
+    // for zero lit boxes.
+ let boxesLitSoFar = -1;
+
+ mediasource_test(async function (test, mediaElement, mediaSource) {
+ const canvas = document.getElementById("test-canvas");
+ const canvasCtx = canvas.getContext("2d");
+ canvas.width = videoWidth;
+ canvas.height = videoHeight;
+
+ const videoData = await (await fetch("mp4/test-boxes-video.mp4")).arrayBuffer();
+ const audioData = (await (await fetch("mp4/test-boxes-audio.mp4")).arrayBuffer());
+
+ const videoSb = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.4d401f"');
+ const audioSb = mediaSource.addSourceBuffer('audio/mp4; codecs="mp4a.40.2"');
+
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ mediaElement.addEventListener('ended', onEnded);
+ mediaElement.addEventListener('timeupdate', onTimeUpdate);
+
+ await appendBufferPromise(videoSb, videoData);
+ await appendBufferPromise(audioSb, audioData);
+ mediaElement.play();
+
+        const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
+        const source = audioCtx.createMediaElementSource(mediaElement);
+        const analyser = audioCtx.createAnalyser();
+ analyser.fftSize = 8192;
+ source.connect(analyser);
+ analyser.connect(audioCtx.destination);
+
+ const freqDomainArray = new Float32Array(analyser.frequencyBinCount);
+
+ function checkNoteBeingPlayed() {
+ const expectedNoteFrequency = noteFrequencies[boxesLitSoFar];
+
+ analyser.getFloatFrequencyData(freqDomainArray);
+ const maxBin = freqDomainArray.reduce((prev, curValue, i) =>
+ curValue > prev.value ? {index: i, value: curValue} : prev,
+ {index: -1, value: -Infinity});
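+            // The reduce above just finds the loudest FFT bin; its index is converted to Hz below
+            // and compared against the expected note frequency.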
+ const binFrequencyWidth = audioCtx.sampleRate / analyser.fftSize;
+ const binFreq = maxBin.index * binFrequencyWidth;
+
+ assert_true(Math.abs(expectedNoteFrequency - binFreq) <= binFrequencyWidth,
+ `The note being played matches the expected one (boxes lit: ${boxesLitSoFar}, ${expectedNoteFrequency.toFixed(1)} Hz)` +
+ `, found ~${binFreq.toFixed(1)} Hz`);
+ }
+
+ function countLitBoxesInCurrentVideoFrame() {
+ canvasCtx.drawImage(mediaElement, 0, 0);
+ const imageData = canvasCtx.getImageData(0, 0, videoWidth, videoHeight);
+ const lights = boxesX.map(boxX => isPixelLit(readPixel(imageData, boxX, boxesY)));
+ let litBoxes = 0;
+ for (let i = 0; i < lights.length; i++) {
+ if (lights[i])
+ litBoxes++;
+ }
+ for (let i = litBoxes; i < lights.length; i++) {
+                assert_false(lights[i], 'After the first non-lit box, all remaining boxes must be non-lit');
+ }
+ return litBoxes;
+ }
+
+ await waitForPlayerToReachTimePromise(mediaElement, 2.5);
+ await appendBufferPromise(audioSb, audioData);
+ mediaSource.endOfStream();
+
+ function onTimeUpdate() {
+ const graceTime = 0.5;
+ if (mediaElement.currentTime >= (1 + boxesLitSoFar) * boxInterval + graceTime && boxesLitSoFar < totalBoxes) {
+ assert_equals(countLitBoxesInCurrentVideoFrame(), boxesLitSoFar + 1, "Num of lit boxes:");
+ boxesLitSoFar++;
+ checkNoteBeingPlayed();
+ }
+ }
+
+ function onEnded() {
+ assert_equals(boxesLitSoFar, totalBoxes, "Boxes lit at video ended event");
+ test.done();
+ }
+ }, "Test the expected frames are played at the expected times, even in presence of reappends");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-correct-frames.html b/testing/web-platform/tests/media-source/mediasource-correct-frames.html
new file mode 100644
index 0000000000..4ef3f4605e
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-correct-frames.html
@@ -0,0 +1,146 @@
+<!DOCTYPE html>
+<!-- Copyright © 2019 Igalia. -->
+<html>
+<head>
+ <title>Frame checking test for simple MSE playback.</title>
+ <meta name="timeout" content="long">
+ <meta name="charset" content="UTF-8">
+ <link rel="author" title="Alicia Boya García" href="mailto:aboya@igalia.com">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<canvas id="test-canvas"></canvas>
+<script>
+ function waitForEventPromise(element, event) {
+ return new Promise(resolve => {
+ function handler(ev) {
+ element.removeEventListener(event, handler);
+ resolve(ev);
+ }
+ element.addEventListener(event, handler);
+ });
+ }
+
+ function appendBufferPromise(sourceBuffer, data) {
+ sourceBuffer.appendBuffer(data);
+ return waitForEventPromise(sourceBuffer, "update");
+ }
+
+ function readPixel(imageData, x, y) {
+ return {
+ r: imageData.data[4 * (y * imageData.width + x)],
+ g: imageData.data[1 + 4 * (y * imageData.width + x)],
+ b: imageData.data[2 + 4 * (y * imageData.width + x)],
+ a: imageData.data[3 + 4 * (y * imageData.width + x)],
+ };
+ }
+
+ function isPixelLit(pixel) {
+ const threshold = 200; // out of 255
+ return pixel.r >= threshold && pixel.g >= threshold && pixel.b >= threshold;
+ }
+
+    // The test video has a few gray boxes. Every box interval (1 second), a new box is lit white and a different
+    // note is played. This test makes sure the right number of boxes is lit and the right note is played at the right time.
+ const totalBoxes = 7;
+ const boxInterval = 1; // seconds
+
+ const videoWidth = 320;
+ const videoHeight = 240;
+ const boxesY = 210;
+ const boxSide = 20;
+ const boxMargin = 20;
+ const allBoxesWidth = totalBoxes * boxSide + (totalBoxes - 1) * boxMargin;
+ const boxesX = new Array(totalBoxes).fill(undefined)
+ .map((_, i) => (videoWidth - allBoxesWidth) / 2 + boxSide / 2 + i * (boxSide + boxMargin));
+
+ // Sound starts playing A4 (440 Hz) and goes one chromatic note up with every box lit.
+    // By comparing the player position to both the number of boxes lit and the note played, we can detect A/V
+ // synchronization issues automatically.
+ const noteFrequencies = new Array(1 + totalBoxes).fill(undefined)
+ .map((_, i) => 440 * Math.pow(Math.pow(2, 1 / 12), i));
+
+    // We also check the first second [0, 1), where no boxes are lit, so we start counting at -1 to cover the check
+    // for zero lit boxes.
+ let boxesLitSoFar = -1;
+
+ mediasource_test(async function (test, mediaElement, mediaSource) {
+ const canvas = document.getElementById("test-canvas");
+ const canvasCtx = canvas.getContext("2d");
+ canvas.width = videoWidth;
+ canvas.height = videoHeight;
+
+ const videoData = await (await fetch("mp4/test-boxes-video.mp4")).arrayBuffer();
+ const audioData = (await (await fetch("mp4/test-boxes-audio.mp4")).arrayBuffer());
+
+ const videoSb = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.4d401f"');
+ const audioSb = mediaSource.addSourceBuffer('audio/mp4; codecs="mp4a.40.2"');
+
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ mediaElement.addEventListener('ended', onEnded);
+ mediaElement.addEventListener('timeupdate', onTimeUpdate);
+
+ await appendBufferPromise(videoSb, videoData);
+ await appendBufferPromise(audioSb, audioData);
+ mediaSource.endOfStream();
+ mediaElement.play();
+
+ const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
+ const source = audioCtx.createMediaElementSource(mediaElement);
+ const analyser = audioCtx.createAnalyser();
+ analyser.fftSize = 8192;
+ source.connect(analyser);
+ analyser.connect(audioCtx.destination);
+
+ const freqDomainArray = new Float32Array(analyser.frequencyBinCount);
+
+ function checkNoteBeingPlayed() {
+ const expectedNoteFrequency = noteFrequencies[boxesLitSoFar];
+
+ analyser.getFloatFrequencyData(freqDomainArray);
+ const maxBin = freqDomainArray.reduce((prev, curValue, i) =>
+ curValue > prev.value ? {index: i, value: curValue} : prev,
+ {index: -1, value: -Infinity});
+ const binFrequencyWidth = audioCtx.sampleRate / analyser.fftSize;
+ const binFreq = maxBin.index * binFrequencyWidth;
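+            // Rough arithmetic for context (the actual rate depends on the AudioContext): at a
+            // 44.1 kHz sample rate and fftSize 8192, each bin is 44100 / 8192 ≈ 5.4 Hz wide,
+            // which is also the tolerance used in the assertion below.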
+
+ assert_true(Math.abs(expectedNoteFrequency - binFreq) <= binFrequencyWidth,
+ `The note being played matches the expected one (boxes lit: ${boxesLitSoFar}, ${expectedNoteFrequency.toFixed(1)} Hz)` +
+ `, found ~${binFreq.toFixed(1)} Hz`);
+ }
+
+ function countLitBoxesInCurrentVideoFrame() {
+ canvasCtx.drawImage(mediaElement, 0, 0);
+ const imageData = canvasCtx.getImageData(0, 0, videoWidth, videoHeight);
+ const lights = boxesX.map(boxX => isPixelLit(readPixel(imageData, boxX, boxesY)));
+ let litBoxes = 0;
+ for (let i = 0; i < lights.length; i++) {
+ if (lights[i])
+ litBoxes++;
+ }
+ for (let i = litBoxes; i < lights.length; i++) {
+                assert_false(lights[i], 'After the first non-lit box, all remaining boxes must be non-lit');
+ }
+ return litBoxes;
+ }
+
+ function onTimeUpdate() {
+ const graceTime = 0.5;
+ if (mediaElement.currentTime >= (1 + boxesLitSoFar) * boxInterval + graceTime && boxesLitSoFar < totalBoxes) {
+ assert_equals(countLitBoxesInCurrentVideoFrame(), boxesLitSoFar + 1, "Num of lit boxes:");
+ boxesLitSoFar++;
+ checkNoteBeingPlayed();
+ }
+ }
+
+ function onEnded() {
+ assert_equals(boxesLitSoFar, totalBoxes, "Boxes lit at video ended event");
+ test.done();
+ }
+ }, "Test the expected frames are played at the expected times");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-detach.html b/testing/web-platform/tests/media-source/mediasource-detach.html
new file mode 100644
index 0000000000..3f87d9a3d5
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-detach.html
@@ -0,0 +1,54 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+ function mediasource_detach_test(testFunction, description)
+ {
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var segmentInfo = MediaSourceUtil.SEGMENT_INFO;
+ var sourceBuffer = mediaSource.addSourceBuffer(segmentInfo.type);
+
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+ assert_equals(mediaSource.readyState, 'open');
+
+ mediaSource.addEventListener('sourceclose', test.step_func(function (event)
+ {
+ assert_equals(mediaSource.sourceBuffers.length, 0, 'sourceBuffers is empty');
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, 'activeSourceBuffers is empty');
+ assert_equals(mediaSource.readyState, 'closed', 'readyState is "closed"');
+ assert_true(Number.isNaN(mediaSource.duration), 'duration is NaN');
+ test.done();
+ }));
+
+ MediaSourceUtil.loadBinaryData(test, segmentInfo.url, function(mediaData)
+ {
+ testFunction(test, mediaElement, mediaSource, sourceBuffer, mediaData);
+ });
+ }, description);
+ }
+
+ mediasource_detach_test(function(test, mediaElement, mediaSource, sourceBuffer, mediaData)
+ {
+ mediaElement.load();
+ }, 'Test media.load() before appending data will trigger MediaSource detaching from a media element.');
+
+ mediasource_detach_test(function(test, mediaElement, mediaSource, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, 'updateend', 'updateend after appending data');
+          test.expectEvent(mediaElement, 'loadedmetadata', 'media element loadedmetadata');
+ test.waitForExpectedEvents(() =>
+ {
+ assert_greater_than(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING,
+ 'media element readyState is greater than "HAVE_NOTHING"');
+ assert_false(sourceBuffer.updating, 'updating attribute is false');
+ assert_equals(mediaSource.readyState, 'open');
+ mediaElement.load();
+ });
+
+ sourceBuffer.appendBuffer(mediaData);
+ }, 'Test media.load() after appending data will trigger MediaSource detaching from a media element.');
+</script>
diff --git a/testing/web-platform/tests/media-source/mediasource-duration-boundaryconditions.html b/testing/web-platform/tests/media-source/mediasource-duration-boundaryconditions.html
new file mode 100644
index 0000000000..e5be9f18fc
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-duration-boundaryconditions.html
@@ -0,0 +1,63 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MediaSource.duration boundary condition test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ function DurationBoundaryConditionTest(testDurationValue, expectedError, description)
+ {
+ return mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ // Append initialization segment.
+ test.expectEvent(sourceBuffer, "updateend", "sourceBuffer");
+ test.expectEvent(mediaElement, "loadedmetadata", "mediaElement");
+ sourceBuffer.appendBuffer(MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init));
+ test.waitForExpectedEvents(function()
+ {
+ if (expectedError) {
+ assert_throws_js(expectedError,
+ function() { mediaSource.duration = testDurationValue; },
+ "mediaSource.duration assignment throws an exception for " + testDurationValue);
+ test.done();
+ return;
+ }
+
+ mediaSource.duration = testDurationValue;
+
+ assert_equals(mediaSource.duration, testDurationValue, "mediaSource.duration");
+ assert_equals(mediaElement.duration, testDurationValue, "mediaElement.duration");
+
+ test.expectEvent(mediaElement, "durationchange", "mediaElement");
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.duration, testDurationValue, "mediaSource.duration");
+ assert_equals(mediaElement.duration, testDurationValue, "mediaElement.duration");
+ test.done();
+ });
+ });
+
+ }, description);
+ }
+
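+          // The cases below exercise the duration setter's boundary behavior: NaN and any
+          // negative value are expected to throw a TypeError, while non-negative finite
+          // values and Number.POSITIVE_INFINITY are expected to be accepted.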
+ DurationBoundaryConditionTest(Math.pow(2, 31) - 1, null, "Set duration to 2^31 - 1");
+ DurationBoundaryConditionTest(1, null, "Set duration to 1");
+ DurationBoundaryConditionTest(Number.MAX_VALUE, null, "Set duration to Number.MAX_VALUE");
+ DurationBoundaryConditionTest(Number.MIN_VALUE, null, "Set duration to Number.MIN_VALUE");
+ DurationBoundaryConditionTest(Number.MAX_VALUE - 1, null, "Set duration to Number.MAX_VALUE - 1");
+ DurationBoundaryConditionTest(Number.MIN_VALUE - 1, TypeError, "Set duration to Number.MIN_VALUE - 1");
+ DurationBoundaryConditionTest(Number.POSITIVE_INFINITY, null, "Set duration to Number.POSITIVE_INFINITY");
+ DurationBoundaryConditionTest(Number.NEGATIVE_INFINITY, TypeError, "Set duration to Number.NEGATIVE_INFINITY");
+ DurationBoundaryConditionTest(-1 * Number.MAX_VALUE, TypeError, "Set duration to lowest value.");
+ DurationBoundaryConditionTest(-101.9, TypeError, "Set duration to a negative double.");
+ DurationBoundaryConditionTest(101.9, null, "Set duration to a positive double.");
+ DurationBoundaryConditionTest(0, null, "Set duration to zero");
+ DurationBoundaryConditionTest(NaN, TypeError, "Set duration to NaN");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-duration.html b/testing/web-platform/tests/media-source/mediasource-duration.html
new file mode 100644
index 0000000000..b4619da38b
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-duration.html
@@ -0,0 +1,383 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MediaSource.duration &amp; HTMLMediaElement.duration test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+
+ var subType = MediaSourceUtil.getSubType(MediaSourceUtil.AUDIO_ONLY_TYPE);
+ var manifestFilenameAudio = subType + "/test-a-128k-44100Hz-1ch-manifest.json";
+ var manifestFilenameVideo = subType + "/test-v-128k-320x240-30fps-10kfr-manifest.json";
+
+ function mediasource_truncated_duration_seek_test(testFunction, description, options)
+ {
+ return mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_greater_than(segmentInfo.duration, 2, 'Sufficient test media duration');
+
+ var fullDuration = segmentInfo.duration;
+ var seekTo = fullDuration / 2.0;
+ var truncatedDuration = seekTo / 2.0;
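+              // For example (assuming roughly 4 seconds of test media; the test only requires
+              // more than 2): seekTo would be 2 and truncatedDuration would start at 1; it is
+              // later tightened to the actual buffered end time after the remove() below.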
+
+ mediaElement.play();
+
+ // Append all the segments
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.expectEvent(mediaElement, 'playing', 'Playing triggered');
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(mediaElement, 'seeking', 'seeking to seekTo');
+ test.expectEvent(mediaElement, 'timeupdate', 'timeupdate while seeking to seekTo');
+ test.expectEvent(mediaElement, 'seeked', 'seeked to seekTo');
+ mediaElement.currentTime = seekTo;
+ assert_true(mediaElement.seeking, 'mediaElement.seeking (to seekTo)');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than_equal(mediaElement.currentTime, seekTo, 'Playback time has reached seekTo');
+ assert_false(mediaElement.seeking, 'mediaElement.seeking after seeked to seekTo');
+
+ assert_false(sourceBuffer.updating, 'sourceBuffer.updating');
+
+ sourceBuffer.remove(truncatedDuration, Infinity);
+
+ assert_true(sourceBuffer.updating, 'sourceBuffer.updating');
+ test.expectEvent(sourceBuffer, 'updatestart', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'update', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than_equal(mediaElement.currentTime, seekTo, 'Playback time has reached seekTo');
+ assert_false(sourceBuffer.updating, 'sourceBuffer.updating');
+
+ // Remove will not remove partial frames, so the resulting duration is the highest end time
+ // of the track buffer ranges, and is greater than or equal to the highest coded frame
+ // presentation time across all track buffer ranges. We first obtain the intersected track buffer
+ // ranges end time and set the duration to that value.
+ truncatedDuration = sourceBuffer.buffered.end(sourceBuffer.buffered.length-1);
+ assert_less_than(truncatedDuration, seekTo,
+ 'remove has removed the current playback position from at least one track buffer');
+
+ mediaSource.duration = truncatedDuration;
+ test.expectEvent(mediaElement, 'seeking', 'Seeking to truncated duration');
+
+ // The actual duration may be slightly higher than truncatedDuration because the
+ // duration is adjusted upwards if necessary to be the highest end time across all track buffer
+ // ranges. Allow that increase here.
+ assert_less_than_equal(truncatedDuration, mediaSource.duration,
+ 'Duration should not be less than what was set');
+ // Here, we assume no test media coded frame duration is longer than 100ms.
+ assert_less_than(mediaSource.duration - truncatedDuration, 0.1);
+
+ // Update our truncatedDuration to be the actual new duration.
+ truncatedDuration = mediaSource.duration;
+
+ assert_true(mediaElement.seeking, 'Seeking after setting truncatedDuration');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.currentTime, truncatedDuration,
+ 'Playback time is truncatedDuration while seeking');
+ assert_true(mediaElement.seeking, 'mediaElement.seeking while seeking to truncatedDuration');
+ assert_equals(mediaElement.duration, truncatedDuration,
+ 'mediaElement truncatedDuration during seek to it');
+ assert_equals(mediaSource.duration, truncatedDuration,
+ 'mediaSource truncatedDuration during seek to it');
+
+ testFunction(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData,
+ truncatedDuration);
+ });
+ }, description, options);
+ }
+
+ mediasource_truncated_duration_seek_test(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer,
+ mediaData, truncatedDuration)
+ {
+ // Tests that duration truncation below current playback position
+ // starts seek to new duration.
+ test.done();
+ }, 'Test seek starts on duration truncation below currentTime');
+
+ mediasource_truncated_duration_seek_test(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer,
+ mediaData, truncatedDuration)
+ {
+ // The duration has been truncated at this point, and there is an
+ // outstanding seek pending.
+ test.expectEvent(sourceBuffer, 'updateend', 'updateend after appending more data');
+
+ test.expectEvent(mediaElement, 'timeupdate', 'timeupdate while finishing seek to truncatedDuration');
+ test.expectEvent(mediaElement, 'seeked', 'seeked to truncatedDuration');
+
+ // Allow seek to complete by appending more data beginning at the
+ // truncated duration timestamp.
+ sourceBuffer.timestampOffset = truncatedDuration;
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than_equal(mediaElement.currentTime, truncatedDuration,
+ 'Playback time has reached truncatedDuration');
+ assert_approx_equals(mediaElement.duration, truncatedDuration + segmentInfo.duration, 0.05,
+ 'mediaElement duration increased by new append');
+ assert_equals(mediaSource.duration, mediaElement.duration,
+ 'mediaSource duration increased by new append');
+ assert_false(mediaElement.seeking, 'mediaElement.seeking after seeked to truncatedDuration');
+
+ test.done();
+ });
+ }, 'Test appendBuffer completes previous seek to truncated duration');
+
+ mediasource_truncated_duration_seek_test(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer,
+ mediaData, truncatedDuration)
+ {
+ // The duration has been truncated at this point, and there is an
+ // outstanding seek pending.
+ test.expectEvent(mediaSource, 'sourceended', 'endOfStream acknowledged');
+
+ test.expectEvent(mediaElement, 'timeupdate', 'timeupdate while finishing seek to truncatedDuration');
+ test.expectEvent(mediaElement, 'seeked', 'seeked to truncatedDuration');
+
+ // Call endOfStream() to complete the pending seek.
+ mediaSource.endOfStream();
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than_equal(mediaElement.currentTime, truncatedDuration,
+ 'Playback time has reached truncatedDuration');
+ // The mediaSource.readyState is "ended". Buffered ranges have been adjusted to the longest track.
+ truncatedDuration = sourceBuffer.buffered.end(sourceBuffer.buffered.length-1);
+ assert_equals(mediaElement.duration, truncatedDuration,
+ 'mediaElement truncatedDuration after seek to it');
+ assert_equals(mediaSource.duration, truncatedDuration,
+ 'mediaSource truncatedDuration after seek to it');
+ assert_false(mediaElement.seeking, 'mediaElement.seeking after seeked to truncatedDuration');
+
+ test.done();
+ });
+ }, 'Test endOfStream completes previous seek to truncated duration');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_greater_than(segmentInfo.duration, 2, 'Sufficient test media duration');
+
+ var fullDuration = segmentInfo.duration;
+ var newDuration = 0.5;
+
+ var expectedDurationChangeEventCount = 1;
+ var durationchangeEventCounter = 0;
+ var durationchangeEventHandler = test.step_func(function(event)
+ {
+ assert_equals(mediaElement.duration, mediaSource.duration, 'mediaElement newDuration');
+              // The final duration may be greater than originally set, per MSE's 'Duration change'
+              // algorithm (section 2.4.6), so only assert a lower bound here.
+ assert_less_than_equal(newDuration, mediaSource.duration, 'mediaSource newDuration');
+ durationchangeEventCounter++;
+ });
+
+ mediaElement.play();
+
+ // Append all the segments
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.expectEvent(mediaElement, 'playing', 'Playing triggered');
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_less_than(mediaElement.currentTime, newDuration / 2, 'mediaElement currentTime');
+
+ assert_false(sourceBuffer.updating, "updating");
+
+              // Remove media beyond newDuration; the 'durationchange' is fired later, when the duration is actually set.
+ sourceBuffer.remove(newDuration, Infinity);
+
+ assert_true(sourceBuffer.updating, "updating");
+ test.expectEvent(sourceBuffer, 'updatestart', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'update', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ // Media load also fires 'durationchange' event, so only start counting them now.
+ mediaElement.addEventListener('durationchange', durationchangeEventHandler);
+
+ assert_false(sourceBuffer.updating, "updating");
+
+ // Truncate duration. This should result in one 'durationchange' fired.
+ mediaSource.duration = newDuration;
+
+              // The final duration may be greater than originally set, per MSE's 'Duration change'
+              // algorithm (section 2.4.6). Adjust newDuration accordingly.
+ assert_true(newDuration <= mediaSource.duration, 'adjusted duration');
+ newDuration = mediaSource.duration;
+
+ // Set duration again to make sure it does not trigger another 'durationchange' event.
+ mediaSource.duration = newDuration;
+
+ // Mark endOfStream so that playback can reach 'ended' at the new duration.
+ test.expectEvent(mediaSource, 'sourceended', 'endOfStream acknowledged');
+ mediaSource.endOfStream();
+
+ // endOfStream can change duration slightly.
+ // Allow for one more 'durationchange' event only in this case.
+ var currentDuration = mediaSource.duration;
+ if (currentDuration != newDuration) {
+ newDuration = currentDuration;
+ ++expectedDurationChangeEventCount;
+ }
+
+ // Allow media to play to end while counting 'durationchange' events.
+ test.expectEvent(mediaElement, 'ended', 'Playback ended');
+ test.waitForExpectedEvents(function()
+ {
+ mediaElement.removeEventListener('durationchange', durationchangeEventHandler);
+ assert_equals(durationchangeEventCounter, expectedDurationChangeEventCount, 'durationchanges');
+ test.done();
+ });
+ });
+ }, 'Test setting same duration multiple times does not fire duplicate durationchange');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_greater_than(segmentInfo.duration, 2, 'Sufficient test media duration');
+
+ var fullDuration = segmentInfo.duration;
+ var newDuration = fullDuration / 2;
+
+ // Append all the segments
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.expectEvent(mediaElement, 'loadedmetadata', 'mediaElement');
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating");
+
+ assert_throws_dom("InvalidStateError", function()
+ {
+ mediaSource.duration = newDuration;
+ }, "duration");
+
+ test.done();
+ });
+ }, 'Test setting the duration to less than the highest starting presentation timestamp will throw');
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameAudio, function(typeAudio, dataAudio)
+ {
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameVideo, function(typeVideo, dataVideo)
+ {
+ var sourceBufferAudio = mediaSource.addSourceBuffer(typeAudio);
+ var sourceBufferVideo = mediaSource.addSourceBuffer(typeVideo);
+ var newDuration = 1.2;
+
+ sourceBufferAudio.appendWindowEnd = 2.0;
+ sourceBufferAudio.appendWindowStart = newDuration / 2.0;
+ sourceBufferAudio.appendBuffer(dataAudio);
+
+ sourceBufferVideo.appendWindowEnd = 2.0;
+ sourceBufferVideo.appendWindowStart = newDuration * 1.3;
+ sourceBufferVideo.appendBuffer(dataVideo);
+
+ test.expectEvent(sourceBufferAudio, "updateend");
+ test.expectEvent(sourceBufferVideo, "updateend");
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(sourceBufferAudio.buffered.length, 1);
+ assert_equals(sourceBufferVideo.buffered.length, 1);
+ assert_less_than(sourceBufferAudio.buffered.start(0), newDuration);
+ assert_greater_than(sourceBufferVideo.buffered.start(0), newDuration);
+ assert_throws_dom("InvalidStateError", function () { mediaSource.duration = newDuration; });
+ test.done();
+ });
+ });
+ });
+ }, "Truncating the duration throws an InvalidStateError exception when new duration is less than the highest buffered range start time of one of the track buffers");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameAudio, function(typeAudio, dataAudio)
+ {
+ MediaSourceUtil.fetchManifestAndData(test, manifestFilenameVideo, function(typeVideo, dataVideo)
+ {
+ var sourceBufferAudio = mediaSource.addSourceBuffer(typeAudio);
+ var sourceBufferVideo = mediaSource.addSourceBuffer(typeVideo);
+
+ // Buffer audio [0.8,1.8)
+ sourceBufferAudio.timestampOffset = 0.8;
+ sourceBufferAudio.appendWindowEnd = 1.8;
+ sourceBufferAudio.appendBuffer(dataAudio);
+
+ // Buffer video [1.5,3)
+ sourceBufferVideo.timestampOffset = 1.5;
+ sourceBufferVideo.appendWindowEnd = 3;
+ sourceBufferVideo.appendBuffer(dataVideo);
+
+ test.expectEvent(sourceBufferAudio, "updateend");
+ test.expectEvent(sourceBufferVideo, "updateend");
+ test.waitForExpectedEvents(function()
+ {
+ var newDuration = 2.0;
+
+ // Verify the test setup
+ assert_equals(sourceBufferAudio.buffered.length, 1);
+ assert_equals(sourceBufferVideo.buffered.length, 1);
+ assert_greater_than(sourceBufferAudio.buffered.end(0), 1.5);
+ assert_less_than(sourceBufferAudio.buffered.end(0), newDuration);
+ assert_less_than(sourceBufferVideo.buffered.start(0), newDuration);
+ assert_greater_than(sourceBufferVideo.buffered.end(0), newDuration + 0.5);
+
+ // Verify the expected error
+                      // We assume the relocated test video has at least one coded
+ // frame presentation interval which fits in [>2.0,>2.5)
+ assert_throws_dom("InvalidStateError", function () { mediaSource.duration = newDuration; });
+ test.done();
+ });
+ });
+ });
+ }, "Truncating the duration throws an InvalidStateError exception when new duration is less than a buffered coded frame presentation time");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_less_than(segmentInfo.duration, 60, 'Sufficient test media duration');
+ sourceBuffer.appendBuffer(mediaData);
+ test.expectEvent(sourceBuffer, 'updateend', 'Media data appended to the SourceBuffer');
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.duration = 60;
+ assert_false(sourceBuffer.updating, 'No SourceBuffer update when duration is increased');
+ test.done();
+ });
+ }, 'Increasing the duration does not trigger any SourceBuffer update');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_greater_than(segmentInfo.duration, 2, 'Sufficient test media duration');
+ mediaElement.play();
+ sourceBuffer.appendBuffer(mediaData);
+ test.expectEvent(sourceBuffer, 'updateend', 'Media data appended to the SourceBuffer');
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.duration = 60;
+ assert_false(sourceBuffer.updating, 'No SourceBuffer update when duration is increased');
+ test.done();
+ });
+ }, 'Increasing the duration during media playback does not trigger any SourceBuffer update');
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-endofstream-invaliderror.html b/testing/web-platform/tests/media-source/mediasource-endofstream-invaliderror.html
new file mode 100644
index 0000000000..20a118d717
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-endofstream-invaliderror.html
@@ -0,0 +1,53 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Invalid MediaSource.endOfStream() parameter test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ assert_equals(mediaSource.readyState, 'open');
+
+ assert_throws_js(TypeError,
+ function() { mediaSource.endOfStream('garbage'); },
+ 'endOfStream(\'garbage\') throws TypeError');
+
+ assert_equals(mediaSource.readyState, 'open');
+ test.done();
+ }, 'Test MediaSource.endOfStream() with invalid non-empty error string.');
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ assert_equals(mediaSource.readyState, 'open');
+
+ assert_throws_js(TypeError,
+ function() { mediaSource.endOfStream(''); },
+ 'endOfStream(\'\') throws TypeError');
+
+ assert_equals(mediaSource.readyState, 'open');
+ test.done();
+ }, 'Test MediaSource.endOfStream() with invalid empty error string.');
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ assert_equals(mediaSource.readyState, 'open');
+
+ assert_throws_js(TypeError,
+ function() { mediaSource.endOfStream(null); },
+ 'endOfStream(null) throws TypeError');
+
+ assert_equals(mediaSource.readyState, 'open');
+ test.done();
+ }, 'Test MediaSource.endOfStream() with invalid null error parameter.');
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-endofstream.html b/testing/web-platform/tests/media-source/mediasource-endofstream.html
new file mode 100644
index 0000000000..3af190ea3e
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-endofstream.html
@@ -0,0 +1,73 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<title>Calls to MediaSource.endOfStream() without error</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaSource.duration = 2;
+ mediaSource.endOfStream();
+ assert_equals(mediaSource.duration, 0);
+ test.done();
+ }, 'MediaSource.endOfStream(): duration truncated to 0 when there are no buffered coded frames');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ sourceBuffer.appendBuffer(mediaData);
+ test.expectEvent(sourceBuffer, 'updateend',
+ 'Media buffer appended to SourceBuffer');
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.endOfStream();
+ test.expectEvent(mediaElement, 'canplaythrough',
+ 'Media element may render the media content until the end');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_ENOUGH_DATA,
+ 'Media element has enough data to render the content');
+ test.done();
+ });
+ }, 'MediaSource.endOfStream(): media element notified that it now has all of the media data');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ sourceBuffer.appendBuffer(mediaData);
+ test.expectEvent(sourceBuffer, 'updateend',
+ 'Media buffer appended to SourceBuffer');
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(sourceBuffer.buffered.length, 1,
+ 'Media data properly buffered');
+ var highestEndTime = sourceBuffer.buffered.end(0);
+
+ // Note that segmentInfo.duration is expected to also be the
+ // highest track buffer range end time. Therefore, endOfStream() should
+ // not change duration with this media.
+ assert_approx_equals(segmentInfo.duration, mediaSource.duration, 0.001,
+ 'SegmentInfo duration should initially roughly match mediaSource duration');
+ assert_less_than_equal(highestEndTime, mediaSource.duration,
+ 'Media duration may be slightly longer than intersected track buffered ranges');
+
+ // Set the duration even higher, then confirm that endOfStream() drops it back to be
+ // the highest track buffer range end time.
+ mediaSource.duration += 10;
+ mediaSource.endOfStream();
+
+ assert_equals(sourceBuffer.buffered.length, 1,
+ 'Media data properly buffered after endOfStream');
+
+ assert_approx_equals(segmentInfo.duration, mediaSource.duration, 0.001,
+ 'SegmentInfo duration should still roughly match mediaSource duration');
+ assert_less_than_equal(highestEndTime, mediaSource.duration,
+ 'Media duration may be slightly longer than intersected track buffered ranges');
+ assert_equals(sourceBuffer.buffered.end(0), mediaSource.duration,
+ 'After endOfStream(), highest buffered range end time must be the highest track buffer range end time');
+
+ test.done();
+ });
+ }, 'MediaSource.endOfStream(): duration and buffered range end time before and after endOfStream');
+</script>
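As an aside, not part of the patch above: the last test relies on endOfStream() clamping an over-long duration back down to the highest track buffer range end time. A hypothetical sketch of that behaviour outside the harness, assuming the SourceBuffer has buffered data and is not updating:

    // After all media has been appended, an inflated duration snaps back to the
    // end of the buffered media when the stream is ended.
    function finishStream(mediaSource, sourceBuffer) {
      const bufferedEnd = sourceBuffer.buffered.end(sourceBuffer.buffered.length - 1);
      mediaSource.duration = bufferedEnd + 10;  // deliberately longer than the media
      mediaSource.endOfStream();                // duration is clamped back to bufferedEnd
      return mediaSource.duration;
    }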
diff --git a/testing/web-platform/tests/media-source/mediasource-errors.html b/testing/web-platform/tests/media-source/mediasource-errors.html
new file mode 100644
index 0000000000..b2224aa5f6
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-errors.html
@@ -0,0 +1,273 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+ function ErrorTest(testFunction, description)
+ {
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var segmentInfo = MediaSourceUtil.SEGMENT_INFO;
+
+ if (!segmentInfo) {
+ assert_unreached("No segment info compatible with this MediaSource implementation.");
+ return;
+ }
+
+ var sourceBuffer = mediaSource.addSourceBuffer(segmentInfo.type);
+ MediaSourceUtil.loadBinaryData(test, segmentInfo.url, function(mediaData)
+ {
+ testFunction(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData);
+ });
+ }, description);
+ }
+
+ ErrorTest(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+
+ test.expectEvent(sourceBuffer, "error", "sourceBuffer error.");
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "error", "mediaElement error.");
+ test.expectEvent(mediaSource, "sourceended", "mediaSource ended.");
+ test.expectEvent(mediaSource, "sourceclose", "mediaSource closed.");
+ sourceBuffer.appendBuffer(mediaSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_true(mediaElement.error != null);
+ assert_equals(mediaElement.error.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+
+ assert_equals(mediaSource.sourceBuffers.length, 0);
+ assert_equals(mediaSource.readyState, "closed");
+ test.done();
+ });
+ }, "Appending media segment before the first initialization segment.");
+
+ ErrorTest(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+
+ // Fail if the append error algorithm occurs, since the decode
+ // error will be provided by us directly via endOfStream().
+ sourceBuffer.addEventListener("error", test.unreached_func("'error' should not be fired on sourceBuffer"));
+
+ test.expectEvent(mediaElement, "error", "mediaElement error.");
+ test.expectEvent(mediaSource, "sourceended", "mediaSource ended.");
+ test.expectEvent(mediaSource, "sourceclose", "mediaSource closed.");
+
+ mediaSource.endOfStream("decode");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_true(mediaElement.error != null);
+ assert_equals(mediaElement.error.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+
+ assert_equals(mediaSource.sourceBuffers.length, 0);
+ assert_equals(mediaSource.readyState, "closed");
+
+ // Give a short time for a broken implementation to errantly fire
+ // "error" on sourceBuffer.
+ test.step_timeout(test.step_func_done(), 0);
+ });
+ }, "Signaling 'decode' error via endOfStream() before initialization segment has been appended.");
+
+ ErrorTest(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+
+ // Fail if the append error algorithm occurs, since the network
+ // error will be provided by us directly via endOfStream().
+ sourceBuffer.addEventListener("error", test.unreached_func("'error' should not be fired on sourceBuffer"));
+
+ test.expectEvent(mediaElement, "error", "mediaElement error.");
+ test.expectEvent(mediaSource, "sourceended", "mediaSource ended.");
+ test.expectEvent(mediaSource, "sourceclose", "mediaSource closed.");
+
+ mediaSource.endOfStream("network");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_true(mediaElement.error != null);
+ assert_equals(mediaElement.error.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+
+ assert_equals(mediaSource.sourceBuffers.length, 0);
+ assert_equals(mediaSource.readyState, "closed");
+
+ // Give a short time for a broken implementation to errantly fire
+ // "error" on sourceBuffer.
+ test.step_timeout(test.step_func_done(), 0);
+ });
+ }, "Signaling 'network' error via endOfStream() before initialization segment has been appended.");
+
+ ErrorTest(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+
+ // Fail if the append error algorithm occurs, since the decode
+ // error will be provided by us directly via endOfStream().
+ sourceBuffer.addEventListener("error", test.unreached_func("'error' should not be fired on sourceBuffer"));
+
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "loadedmetadata", "mediaElement metadata.");
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_METADATA);
+
+ test.expectEvent(mediaElement, "error", "mediaElement error.");
+ test.expectEvent(mediaSource, "sourceended", "mediaSource ended.");
+ mediaSource.endOfStream("decode");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_true(mediaElement.error != null);
+ assert_equals(mediaElement.error.code, MediaError.MEDIA_ERR_DECODE);
+ assert_equals(mediaSource.readyState, "ended");
+
+ // Give a short time for a broken implementation to errantly fire
+ // "error" on sourceBuffer.
+ test.step_timeout(test.step_func_done(), 0);
+ });
+
+ }, "Signaling 'decode' error via endOfStream() after initialization segment has been appended and the HTMLMediaElement has reached HAVE_METADATA.");
+
+ ErrorTest(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+
+ // Fail if the append error algorithm occurs, since the network
+ // error will be provided by us directly via endOfStream().
+ sourceBuffer.addEventListener("error", test.unreached_func("'error' should not be fired on sourceBuffer"));
+
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "loadedmetadata", "mediaElement metadata.");
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_METADATA);
+ test.expectEvent(mediaElement, "error", "mediaElement error.");
+ test.expectEvent(mediaSource, "sourceended", "mediaSource ended.");
+ mediaSource.endOfStream("network");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_true(mediaElement.error != null);
+ assert_equals(mediaElement.error.code, MediaError.MEDIA_ERR_NETWORK);
+ assert_equals(mediaSource.readyState, "ended");
+
+ // Give a short time for a broken implementation to errantly fire
+ // "error" on sourceBuffer.
+ test.step_timeout(test.step_func_done(), 0);
+ });
+ }, "Signaling 'network' error via endOfStream() after initialization segment has been appended and the HTMLMediaElement has reached HAVE_METADATA.");
+
+ ErrorTest(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "loadedmetadata", "mediaElement metadata.");
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_METADATA);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+ var index = segmentInfo.init.size + (mediaSegment.length - 1) / 2;
+ // Corrupt the media data starting at |index| so that appending it signals a
+ // 'decode' error: overwrite mediaData[index, index + mediaSegment.length)
+ // with a copy of the media segment.
+ mediaData.set(mediaSegment, index);
+
+ test.expectEvent(sourceBuffer, "error", "sourceBuffer error.");
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaElement, "error", "mediaElement error.");
+ test.expectEvent(mediaSource, "sourceended", "mediaSource ended.");
+ sourceBuffer.appendBuffer(mediaData);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_true(mediaElement.error != null);
+ assert_equals(mediaElement.error.code, MediaError.MEDIA_ERR_DECODE);
+ test.done();
+ });
+ }, "Signaling 'decode' error via segment parser loop algorithm after initialization segment has been appended.");
+
+ ErrorTest(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+ var index = segmentInfo.init.size + (mediaSegment.length - 1) / 2;
+ // Corrupt the media data starting at |index| so that appending it signals a
+ // 'decode' error: overwrite mediaData[index, index + mediaSegment.length)
+ // with a copy of the media segment.
+ mediaData.set(mediaSegment, index);
+
+ // Depending on implementation, mediaElement transition to
+ // HAVE_METADATA and dispatching 'loadedmetadata' may occur, since the
+ // initialization segment is uncorrupted and forms the initial part of
+ // the appended bytes. The segment parser loop continues and
+ // eventually should observe decode error. Other implementations may
+ // delay such transition until some larger portion of the append's
+ // parsing is completed or until the media element is configured to
+ // handle the playback of media with the associated metadata (which may
+ // not occur in this case before the MSE append error algorithm executes.)
+ // So we cannot reliably expect the presence or absence of
+ // 'loadedmetadata' before the MSE append error algorithm executes in
+ // this case; similarly, mediaElement's resulting readyState may be
+ // either HAVE_NOTHING or HAVE_METADATA after the append error
+ // algorithm executes, and the resulting MediaError code would
+ // respectively be MEDIA_ERR_SRC_NOT_SUPPORTED or MEDIA_ERR_DECODE.
+ let loaded = false;
+ mediaElement.addEventListener("loadedmetadata", test.step_func(() => { loaded = true; }));
+ let errored = false;
+ mediaElement.addEventListener("error", test.step_func(() => { errored = true; }));
+
+ test.expectEvent(sourceBuffer, "error", "sourceBuffer error.");
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ test.expectEvent(mediaSource, "sourceended", "mediaSource ended.");
+ sourceBuffer.appendBuffer(mediaData);
+
+ let verifyFinalState = test.step_func(function() {
+ if (loaded) {
+ assert_greater_than(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+ assert_true(mediaElement.error != null);
+ assert_equals(mediaElement.error.code, MediaError.MEDIA_ERR_DECODE);
+ test.done();
+ } else {
+ assert_equals(mediaElement.readyState, HTMLMediaElement.HAVE_NOTHING);
+ assert_true(mediaElement.error != null);
+ assert_equals(mediaElement.error.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED);
+ test.done();
+ }
+ });
+
+ let awaitMediaElementError = test.step_func(function() {
+ if (!errored) {
+ test.step_timeout(awaitMediaElementError, 100);
+ } else {
+ verifyFinalState();
+ }
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ // Not all implementations will reliably fire a "loadedmetadata"
+ // event, so we use custom logic to verify mediaElement state based
+ // on whether or not "loadedmetadata" was ever fired. But first
+ // we must ensure "error" was fired on the mediaElement.
+ awaitMediaElementError();
+ });
+
+ }, "Signaling 'decode' error via segment parser loop algorithm of append containing init plus corrupted media segment.");
+</script>
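As an aside, not part of the patch above: these tests cover the two ways a fatal error reaches the media element through MSE, either from the segment parser loop or from the application via endOfStream(). A hypothetical application-side sketch of the endOfStream() path:

    // Surface a fatal fetch or parse failure through the MediaSource. The error
    // code later observed on the media element depends on how far it got:
    // MEDIA_ERR_SRC_NOT_SUPPORTED before HAVE_METADATA, MEDIA_ERR_NETWORK or
    // MEDIA_ERR_DECODE afterwards, matching the assertions in the tests above.
    function signalFatalError(mediaSource, kind /* 'network' or 'decode' */) {
      if (mediaSource.readyState === 'open')
        mediaSource.endOfStream(kind);
    }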
diff --git a/testing/web-platform/tests/media-source/mediasource-getvideoplaybackquality.html b/testing/web-platform/tests/media-source/mediasource-getvideoplaybackquality.html
new file mode 100644
index 0000000000..54b2a55799
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-getvideoplaybackquality.html
@@ -0,0 +1,69 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>HTMLVideoElement.getVideoPlaybackQuality() test cases.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var previousQuality = mediaElement.getVideoPlaybackQuality();
+ var timeUpdateCount = 0;
+ mediaElement.addEventListener("timeupdate", test.step_func(function (e)
+ {
+ var videoElement = e.target;
+ var newQuality = videoElement.getVideoPlaybackQuality();
+ var now = window.performance.now();
+
+ assert_not_equals(previousQuality, newQuality,
+ "New quality object is different from the previous one");
+ assert_greater_than(newQuality.creationTime, previousQuality.creationTime,
+ "creationTime increases monotonically");
+ assert_approx_equals(newQuality.creationTime, now, 100,
+ "creationTime roughly equals current time");
+
+ assert_greater_than_equal(newQuality.totalVideoFrames, 0, "totalVideoFrames >= 0");
+ assert_greater_than_equal(newQuality.totalVideoFrames, previousQuality.totalVideoFrames,
+ "totalVideoFrames increases monotonically");
+ assert_less_than(newQuality.totalVideoFrames, 300,
+ "totalVideoFrames should remain low as duration is less than 10s and framerate less than 30fps");
+
+ assert_greater_than_equal(newQuality.droppedVideoFrames, 0, "droppedVideoFrames >= 0");
+ assert_greater_than_equal(newQuality.droppedVideoFrames, previousQuality.droppedVideoFrames,
+ "droppedVideoFrames increases monotonically");
+ assert_less_than_equal(newQuality.droppedVideoFrames, newQuality.totalVideoFrames,
+ "droppedVideoFrames is only a portion of totalVideoFrames");
+
+ previousQuality = newQuality;
+ timeUpdateCount++;
+ }));
+
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ sourceBuffer.appendBuffer(mediaData);
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating");
+ mediaSource.endOfStream();
+ assert_less_than(mediaSource.duration, 10, "duration");
+ mediaElement.play().catch(test.unreached_func("Unexpected promise rejection"));
+ test.expectEvent(mediaElement, 'ended', 'mediaElement');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than(timeUpdateCount, 2, "timeUpdateCount");
+ test.done();
+ });
+ }, "Test HTMLVideoElement.getVideoPlaybackQuality() with MediaSource API");
+ </script>
+ </body>
+</html>
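As an aside, not part of the patch above: a hypothetical sketch of how getVideoPlaybackQuality() is typically consumed outside a test, polled on 'timeupdate' as the test does; the names are illustrative only.

    // Report the dropped-frame ratio while the video plays.
    function monitorPlaybackQuality(video, report) {
      video.addEventListener('timeupdate', () => {
        const q = video.getVideoPlaybackQuality();
        if (q.totalVideoFrames > 0)
          report(q.droppedVideoFrames / q.totalVideoFrames);
      });
    }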
diff --git a/testing/web-platform/tests/media-source/mediasource-h264-play-starved.html b/testing/web-platform/tests/media-source/mediasource-h264-play-starved.html
new file mode 100644
index 0000000000..a3cdf3cb26
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-h264-play-starved.html
@@ -0,0 +1,57 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>Test MediaSource behavior when the decoder is starved.</title>
+ <meta content="text/html; charset=UTF-8" http-equiv="Content-Type">
+ <meta name="timeout" content="long">
+ <link rel="author" title="Alicia Boya García" href="mailto:aboya@igalia.com"/>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+ mediasource_test(function (test, video, mediaSource) {
+ if (!MediaSource.isTypeSupported('video/mp4; codecs="avc1.4d001e"')) {
+ // Format not supported, nothing to test on this platform.
+ test.done();
+ return;
+ }
+
+ let initSegment;
+ let mediaSegment;
+
+ const videoSB = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.4d001e"');
+
+ MediaSourceUtil.loadBinaryData(test, "mp4/h264-starvation-init.mp4", initDownloaded);
+
+ function initDownloaded(data) {
+ initSegment = data;
+ MediaSourceUtil.loadBinaryData(test, "mp4/h264-starvation-media.mp4", mediaDownloaded);
+ }
+
+ function mediaDownloaded(data) {
+ mediaSegment = data;
+ videoSB.appendBuffer(initSegment);
+ videoSB.addEventListener("updateend", initSegmentAppended);
+ }
+
+ function initSegmentAppended() {
+ videoSB.removeEventListener("updateend", initSegmentAppended);
+ videoSB.appendBuffer(mediaSegment);
+ videoSB.addEventListener("updateend", mediaSegmentAppended);
+ }
+
+ function mediaSegmentAppended() {
+ video.play();
+
+ video.addEventListener('timeupdate', function onTimeUpdate() {
+ if (video.currentTime >= 2)
+ test.done();
+ });
+ }
+ }, "Enough frames are played when the decoder is starved.")
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-invalid-codec.html b/testing/web-platform/tests/media-source/mediasource-invalid-codec.html
new file mode 100644
index 0000000000..19aa00c4d5
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-invalid-codec.html
@@ -0,0 +1,45 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>SourceBuffer handling of invalid codecs in the initialization segment</title>
+ <link rel="author" title="Alicia Boya García" href="mailto:aboya@igalia.com">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+ function testInvalidCodec(test, mediaElement, mediaSource, mediaType, url) {
+ assert_true(MediaSource.isTypeSupported(mediaType), `Media type not supported in this browser: isTypeSupported('${mediaType}')`);
+
+ MediaSourceUtil.loadBinaryData(test, url, (mediaData) => {
+ _testInvalidCodecWithData(test, mediaElement, mediaSource, mediaType, mediaData);
+ });
+ }
+
+ function _testInvalidCodecWithData(test, mediaElement, mediaSource, mediaType, mediaData) {
+ const sourceBuffer = mediaSource.addSourceBuffer(mediaType);
+ sourceBuffer.appendBuffer(mediaData);
+ test.expectEvent(sourceBuffer, 'error', 'Append ended with error');
+ test.waitForExpectedEvents(() => {
+ test.done();
+ });
+ }
+
+ // These test cases provide a typical media MIME type, but the actual files have been mangled to declare a different,
+ // unsupported, fictitious codec (MP4 fourcc: 'zzzz', WebM codec id 'V_ZZZ'). The browser should report a parsing
+ // error.
+
+ mediasource_test((test, mediaElement, mediaSource) => {
+ testInvalidCodec(test, mediaElement, mediaSource, 'video/mp4;codecs="avc1.4D4001"', 'mp4/invalid-codec.mp4');
+ }, 'Test an MP4 with an invalid codec results in an error.');
+
+ mediasource_test((test, mediaElement, mediaSource) => {
+ testInvalidCodec(test, mediaElement, mediaSource, 'video/webm; codecs="vp8"', 'webm/invalid-codec.webm');
+ }, 'Test a WebM with an invalid codec results in an error.');
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-is-type-supported.html b/testing/web-platform/tests/media-source/mediasource-is-type-supported.html
new file mode 100644
index 0000000000..93b067c692
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-is-type-supported.html
@@ -0,0 +1,106 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MediaSource.isTypeSupported() test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ // Generate a distinct test for each type in types
+ function test_type_support(types, expectation, description)
+ {
+ for (var i = 0; i < types.length; ++i) {
+ test(function()
+ {
+ assert_equals(MediaSource.isTypeSupported(types[i]),
+ expectation, 'supported');
+ }, description + ' "' + types[i] + '"');
+ }
+ };
+
+ test_type_support([
+ 'video',
+ 'video/',
+ 'video/webm',
+ 'video/webm;',
+ 'video/webm;codecs',
+ 'video/webm;codecs=',
+ 'video/webm;codecs="',
+ 'video/webm;codecs=""',
+ 'video/webm;codecs=","',
+ 'audio/webm;aaacodecsbbb=opus',
+ 'unsupported_mediatype',
+ '',
+ null
+ ], false, 'Test invalid MIME format');
+
+ test_type_support([
+ 'xxx',
+ 'text/html',
+ 'image/jpeg'
+ ], false, 'Test invalid MSE MIME media type');
+
+ test_type_support([
+ 'audio/webm;codecs="vp8"',
+ 'audio/mp4;codecs="avc1.4d001e"',
+ 'audio/mp4;codecs="vorbis"',
+ 'audio/webm;codecs="mp4a.40.2"',
+ 'video/mp4;codecs="vp8"',
+ 'video/mp4;codecs="vorbis"',
+ 'video/webm;codecs="mp4a.40.2"',
+ ], false, 'Test invalid mismatch between MIME type and codec ID');
+
+ // Note that, though the user agent might support some subset of
+ // these for progressive non-MSE playback, the MSE mpeg audio
+ // bytestream format specification requires there to be no codecs
+ // parameter.
+ test_type_support([
+ 'audio/mpeg;codecs="mp3"',
+ 'audio/mpeg;codecs="mp4a.69"',
+ 'audio/mpeg;codecs="mp4a.6B"',
+ 'audio/aac;codecs="aac"',
+ 'audio/aac;codecs="adts"',
+ 'audio/aac;codecs="mp4a.40"',
+ ], false, 'Test invalid inclusion of codecs parameter for mpeg audio types');
+
+ test_type_support([
+ 'audio/mp4;codecs="mp4a"',
+ 'audio/mp4;codecs="mp4a.40"',
+ 'audio/mp4;codecs="mp4a.40."',
+ 'audio/mp4;codecs="mp4a.67.3"'
+ ], false, 'Test invalid codec ID');
+
+ test_type_support([
+ 'video/webm;codecs="vp8"',
+ 'video/webm;codecs="vorbis"',
+ 'video/webm;codecs="vp8,vorbis"',
+ 'video/webm;codecs="vorbis, vp8"',
+ 'audio/webm;codecs="vorbis"',
+ 'AUDIO/WEBM;CODECS="vorbis"',
+ 'audio/webm;codecs=vorbis;test="6"',
+ 'audio/webm;codecs="opus"',
+ 'video/webm;codecs="opus"'
+ ], true, 'Test valid WebM type');
+
+ test_type_support([
+ 'video/mp4;codecs="avc1.4d001e"', // H.264 Main Profile level 3.0
+ 'video/mp4;codecs="avc1.42001e"', // H.264 Baseline Profile level 3.0
+ 'audio/mp4;codecs="mp4a.40.2"', // MPEG4 AAC-LC
+ 'audio/mp4;codecs="mp4a.40.5"', // MPEG4 HE-AAC
+ 'audio/mp4;codecs="mp4a.67"', // MPEG2 AAC-LC
+ 'video/mp4;codecs="mp4a.40.2"',
+ 'video/mp4;codecs="avc1.4d001e,mp4a.40.2"',
+ 'video/mp4;codecs="mp4a.40.2 , avc1.4d001e "',
+ 'video/mp4;codecs="avc1.4d001e,mp4a.40.5"',
+ 'audio/mp4;codecs="Opus"',
+ 'video/mp4;codecs="Opus"',
+ 'audio/mp4;codecs="fLaC"',
+ 'video/mp4;codecs="fLaC"'
+ ], true, 'Test valid MP4 type');
+
+ </script>
+ </body>
+</html>
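As an aside, not part of the patch above: a hypothetical feature-detection sketch reflecting what these cases establish, namely that full MIME types with a codecs parameter should be probed (except for the MPEG audio types, which must omit it). The candidate strings below are taken from the test's "valid" lists.

    // Pick the first container/codec combination the user agent can buffer.
    const candidates = [
      'video/webm; codecs="vp8, vorbis"',
      'video/mp4; codecs="avc1.4d001e, mp4a.40.2"',
    ];
    const supportedType = candidates.find(t => MediaSource.isTypeSupported(t));
    // supportedType is undefined if neither combination can be used with MSE.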
diff --git a/testing/web-platform/tests/media-source/mediasource-liveseekable.html b/testing/web-platform/tests/media-source/mediasource-liveseekable.html
new file mode 100644
index 0000000000..123a41e9e5
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-liveseekable.html
@@ -0,0 +1,137 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<title>Checks setting/clearing the live seekable range and HTMLMediaElement.seekable</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+test(function(test)
+{
+ var mediaSource = new MediaSource();
+ assert_equals(mediaSource.readyState, "closed", "media source is closed.");
+ assert_throws_dom("InvalidStateError", function() { mediaSource.setLiveSeekableRange(0, 1); });
+}, "setLiveSeekableRange throws an InvalidStateError exception if the readyState attribute is not 'open'");
+
+
+test(function(test)
+{
+ var mediaSource = new MediaSource();
+ assert_equals(mediaSource.readyState, "closed", "media source is closed.");
+ assert_throws_dom("InvalidStateError", function() { mediaSource.clearLiveSeekableRange(); });
+}, "clearLiveSeekableRange throws an InvalidStateError exception if the readyState attribute is not 'open'");
+
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ var mimetype = MediaSourceUtil.AUDIO_VIDEO_TYPE;
+ var sourceBuffer = mediaSource.addSourceBuffer(mimetype);
+ sourceBuffer.appendBuffer(new Uint8Array(0));
+ assert_true(sourceBuffer.updating, "Updating set when a buffer is appended.");
+ mediaSource.setLiveSeekableRange(0, 1);
+ test.done();
+}, "setLiveSeekableRange does not restrict to not currently updating");
+
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ var mimetype = MediaSourceUtil.AUDIO_VIDEO_TYPE;
+ var sourceBuffer = mediaSource.addSourceBuffer(mimetype);
+ sourceBuffer.appendBuffer(new Uint8Array(0));
+ assert_true(sourceBuffer.updating, "Updating set when a buffer is appended.");
+ mediaSource.clearLiveSeekableRange();
+ test.done();
+}, "clearLiveSeekableRange does not restrict to not currently updating");
+
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ assert_throws_js(TypeError, function() { mediaSource.setLiveSeekableRange(-1, 1); });
+ test.done();
+}, "setLiveSeekableRange throws a TypeError if start is negative");
+
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ assert_throws_js(TypeError, function() { mediaSource.setLiveSeekableRange(2, 1); });
+ test.done();
+}, "setLiveSeekableRange throws a TypeError if start is greater than end");
+
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ mediaSource.setLiveSeekableRange(0, 1);
+ test.done();
+}, "setLiveSeekableRange returns with no error when conditions are correct");
+
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ mediaSource.clearLiveSeekableRange();
+ test.done();
+}, "clearLiveSeekableRange returns with no error when conditions are correct");
+
+
+mediasource_test(function(test, mediaElement, mediaSource)
+{
+ mediaSource.duration = +Infinity;
+ mediaSource.setLiveSeekableRange(1, 2);
+ assert_equals(mediaElement.seekable.length, 1,
+ 'The seekable attribute contains a single range.');
+ assertSeekableEquals(mediaElement, '{ [1.000, 2.000) }',
+ 'The seekable attribute returns the correct range.');
+
+ mediaSource.clearLiveSeekableRange();
+ assertSeekableEquals(mediaElement, '{ }',
+ 'The seekable attribute now returns an empty range.');
+ test.done();
+}, "HTMLMediaElement.seekable returns the live seekable range or an empty range if that range was cleared when nothing is buffered");
+
+
+mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+{
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ test.expectEvent(sourceBuffer, 'updateend', 'Init segment appended to SourceBuffer.');
+ sourceBuffer.appendBuffer(initSegment);
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.duration = +Infinity;
+ mediaSource.setLiveSeekableRange(40, 42);
+
+ // Append a segment that starts after 1s to ensure seekable
+ // won't use 0 as starting point.
+ var midSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[5]);
+ test.expectEvent(sourceBuffer, 'updateend');
+ sourceBuffer.appendBuffer(midSegment);
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.seekable.length, 1,
+ 'The seekable attribute contains a single range.');
+ assert_equals(mediaElement.buffered.length, 1,
+ 'The buffered attribute contains a single range.');
+ assert_not_equals(mediaElement.seekable.start(0), 0,
+ 'The range starts after 0.');
+ assert_equals(mediaElement.seekable.start(0), mediaElement.buffered.start(0),
+ 'The start time is the start time of the buffered range.');
+ assert_equals(mediaElement.seekable.end(0), 42,
+ 'The end time is the end time of the seekable range.');
+
+ mediaSource.clearLiveSeekableRange();
+ assert_equals(mediaElement.seekable.length, 1,
+ 'The seekable attribute contains a single range.');
+ assert_equals(mediaElement.seekable.start(0), 0,
+ 'The start time is now 0.');
+ assert_equals(mediaElement.seekable.end(0), mediaElement.buffered.end(0),
+ 'The end time is now the end time of the buffered range.');
+
+ test.done();
+ });
+ });
+}, 'HTMLMediaElement.seekable returns the union of the buffered range and the live seekable range, when set');
+</script>
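As an aside, not part of the patch above: a hypothetical sketch of the live-streaming pattern these tests exercise, where the duration is unbounded and the advertised seekable window is managed explicitly by the application.

    // Advertise (or later retire) the seekable window of a live presentation.
    function updateLiveWindow(mediaSource, start, end) {
      if (mediaSource.readyState !== 'open')
        return;
      mediaSource.duration = +Infinity;              // open-ended presentation
      mediaSource.setLiveSeekableRange(start, end);
    }
    // Once the live event has finished:
    //   mediaSource.clearLiveSeekableRange();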
diff --git a/testing/web-platform/tests/media-source/mediasource-multiple-attach.html b/testing/web-platform/tests/media-source/mediasource-multiple-attach.html
new file mode 100644
index 0000000000..4a95a42e83
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-multiple-attach.html
@@ -0,0 +1,114 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Test Attaching a MediaSource to multiple HTMLMediaElements.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ function twoMediaElementTest(testFunction, description)
+ {
+ media_test(function(test)
+ {
+ var firstMediaTag = document.createElement('video');
+ var secondMediaTag = document.createElement('video');
+ document.body.appendChild(firstMediaTag);
+ document.body.appendChild(secondMediaTag);
+
+ // Overload done() so that elements added to the document can be
+ // removed.
+ var removeMediaElements = true;
+ var oldTestDone = test.done.bind(test);
+ test.done = function()
+ {
+ if (removeMediaElements) {
+ document.body.removeChild(secondMediaTag);
+ document.body.removeChild(firstMediaTag);
+ removeMediaElements = false;
+ }
+ oldTestDone();
+ };
+
+ testFunction(test, firstMediaTag, secondMediaTag);
+ }, description);
+ }
+
+ twoMediaElementTest(function(test, firstMediaTag, secondMediaTag)
+ {
+ // When the mediaSource is attached to two MediaElements without an
+ // intervening stable state, exactly one of the two MediaElements should
+ // attach successfully; the other should get an error event because the
+ // mediaSource is already in the 'open' readyState.
+ var mediaSource = new MediaSource();
+ var mediaSourceURL = URL.createObjectURL(mediaSource);
+ var gotSourceOpen = false;
+ var gotError = false;
+ var doneIfFinished = test.step_func(function()
+ {
+ if (gotSourceOpen && gotError)
+ test.done();
+ });
+ var errorHandler = test.step_func(function(e)
+ {
+ firstMediaTag.removeEventListener('error', errorHandler);
+ secondMediaTag.removeEventListener('error', errorHandler);
+
+ var eventTarget = e.target;
+ var otherTarget;
+ if (eventTarget == firstMediaTag) {
+ otherTarget = secondMediaTag;
+ } else {
+ assert_equals(eventTarget, secondMediaTag, 'Error target check');
+ otherTarget = firstMediaTag;
+ }
+
+ assert_true(eventTarget.error != null, 'Error state on one tag');
+ assert_equals(eventTarget.error.code, MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED, 'Expected error code');
+ assert_equals(otherTarget.error, null, 'No error on other tag');
+
+ assert_equals(eventTarget.networkState, HTMLMediaElement.NETWORK_NO_SOURCE,
+ 'Tag with error state networkState');
+ assert_equals(otherTarget.networkState, HTMLMediaElement.NETWORK_LOADING,
+ 'Tag without error state networkState');
+
+ gotError = true;
+ doneIfFinished();
+ });
+
+ test.expectEvent(mediaSource, 'sourceopen', 'An attachment succeeded');
+ firstMediaTag.addEventListener('error', errorHandler);
+ secondMediaTag.addEventListener('error', errorHandler);
+
+ firstMediaTag.src = mediaSourceURL;
+ secondMediaTag.src = mediaSourceURL;
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, 'open', 'Source is opened');
+ gotSourceOpen = true;
+ doneIfFinished();
+ });
+ }, 'Test exactly one succeeds when two MediaElements attach to same MediaSource');
+
+ mediasource_test(function(test, mediaElement, mediaSource) {
+ assert_equals(mediaSource.readyState, 'open', 'Source open');
+ // Set the tag's src attribute. This should close mediaSource,
+ // reattach it to the tag, and initiate source reopening.
+ test.expectEvent(mediaSource, 'sourceopen', 'Source attached again');
+ mediaElement.src = URL.createObjectURL(mediaSource);
+ assert_equals(mediaSource.readyState, 'closed', 'Source closed');
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, 'open', 'Source reopened');
+ test.done();
+ });
+ }, 'Test that MediaSource can reattach if closed first');
+ </script>
+ </body>
+</html>
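As an aside, not part of the patch above: the second test depends on a MediaSource being attachable to only one element at a time, and only while closed. A hypothetical sketch of moving a MediaSource between elements:

    // Detach from one element (which closes the MediaSource), then attach to another.
    function moveMediaSource(mediaSource, fromVideo, toVideo) {
      fromVideo.removeAttribute('src');
      fromVideo.load();                               // readyState becomes 'closed'
      toVideo.src = URL.createObjectURL(mediaSource); // 'sourceopen' fires on reattach
    }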
diff --git a/testing/web-platform/tests/media-source/mediasource-play-then-seek-back.html b/testing/web-platform/tests/media-source/mediasource-play-then-seek-back.html
new file mode 100644
index 0000000000..66fdbe810d
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-play-then-seek-back.html
@@ -0,0 +1,57 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Simple MediaSource playback &amp; seek test case.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+
+ mediaElement.play();
+ // Append all the segments
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.expectEvent(mediaElement, 'playing', 'Playing triggered');
+ sourceBuffer.appendBuffer(mediaData);
+
+ function confirmPlayThenEnd()
+ {
+ test.waitForCurrentTimeChange(mediaElement, function ()
+ {
+ assert_greater_than(mediaElement.currentTime, 0.0, 'Playback has started after seek.');
+ test.done();
+ });
+ }
+
+ function finishSeekThenPlay()
+ {
+ test.expectEvent(mediaElement, 'seeked', 'mediaElement finished seek');
+
+ test.waitForExpectedEvents(confirmPlayThenEnd);
+ }
+
+ function delayedPlayHandler()
+ {
+ assert_greater_than(mediaElement.currentTime, 0.0, 'Playback has started.');
+ test.expectEvent(mediaElement, 'seeking', 'mediaElement');
+ mediaElement.currentTime = 0.0;
+ assert_true(mediaElement.seeking, 'mediaElement is seeking');
+
+ test.waitForExpectedEvents(finishSeekThenPlay);
+ }
+
+ test.waitForExpectedEvents(function()
+ {
+ test.waitForCurrentTimeChange(mediaElement, delayedPlayHandler);
+ });
+
+ }, 'Test playing then seeking back.');
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-play.html b/testing/web-platform/tests/media-source/mediasource-play.html
new file mode 100644
index 0000000000..2129b8f473
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-play.html
@@ -0,0 +1,61 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Simple MediaSource playback test case.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ mediaElement.addEventListener('ended', test.step_func_done());
+
+ test.expectEvent(sourceBuffer, 'updatestart', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'update', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+
+ assert_false(sourceBuffer.updating, "sourceBuffer.updating");
+
+ sourceBuffer.appendBuffer(mediaData);
+
+ assert_true(sourceBuffer.updating, "sourceBuffer.updating");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "sourceBuffer.updating");
+
+ // Truncate the buffered media to about 1 second duration.
+ sourceBuffer.remove(1, +Infinity);
+
+ assert_true(sourceBuffer.updating, "sourceBuffer.updating");
+ test.expectEvent(sourceBuffer, 'updatestart', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'update', 'sourceBuffer');
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating");
+ assert_greater_than(mediaSource.duration, 1, "duration");
+
+ // Complete truncation of duration to 1 second.
+ mediaSource.duration = 1;
+
+ test.expectEvent(mediaElement, "durationchange");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.endOfStream();
+ mediaElement.play();
+ });
+ }, "Test normal playback case with MediaSource API");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-preload.html b/testing/web-platform/tests/media-source/mediasource-preload.html
new file mode 100644
index 0000000000..e387b63737
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-preload.html
@@ -0,0 +1,72 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Various MediaSource HTMLMediaElement preload tests.</title>
+ <link rel="author" title="Matthew Wolenetz" href="mailto:wolenetz@chromium.org"/>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ </head>
+ <body>
+ <script>
+ function attachWithPreloadTest(preload)
+ {
+ async_test(function(test)
+ {
+ var video = document.createElement("video");
+ var mediaSource = new MediaSource();
+ var mediaSourceURL = URL.createObjectURL(mediaSource);
+
+ video.preload = preload;
+ document.body.appendChild(video);
+ test.add_cleanup(function() {
+ document.body.removeChild(video);
+ URL.revokeObjectURL(mediaSourceURL);
+ });
+
+ mediaSource.addEventListener("sourceopen", test.step_func_done());
+ video.src = mediaSourceURL;
+ }, "sourceopen occurs with element preload=" + preload);
+ }
+
+ attachWithPreloadTest("auto");
+ attachWithPreloadTest("metadata");
+ attachWithPreloadTest("none");
+
+ function errorWithPreloadTest(preload, bogusURLStyle)
+ {
+ async_test(function(test)
+ {
+ var mediaSource = new MediaSource();
+ var bogusURL = URL.createObjectURL(mediaSource);
+
+ if (bogusURLStyle == "corrupted") {
+ var goodURL = bogusURL;
+ test.add_cleanup(function() { URL.revokeObjectURL(goodURL); });
+ bogusURL += "0";
+ } else if (bogusURLStyle == "revoked") {
+ URL.revokeObjectURL(bogusURL);
+ } else {
+ assert_unreached("invalid case");
+ }
+
+ var video = document.createElement("video");
+ video.preload = preload;
+ document.body.appendChild(video);
+ test.add_cleanup(function() { document.body.removeChild(video); });
+
+ mediaSource.addEventListener("sourceopen", test.unreached_func("'sourceopen' should not be fired"));
+
+ video.onerror = test.step_func_done();
+ video.src = bogusURL;
+ }, "error occurs with bogus blob URL (" + bogusURLStyle + " MediaSource object URL) and element preload=" + preload);
+ }
+
+ errorWithPreloadTest("auto", "revoked");
+ errorWithPreloadTest("metadata", "revoked");
+
+ errorWithPreloadTest("auto", "corrupted");
+ errorWithPreloadTest("metadata", "corrupted");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-redundant-seek.html b/testing/web-platform/tests/media-source/mediasource-redundant-seek.html
new file mode 100644
index 0000000000..05eae9714f
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-redundant-seek.html
@@ -0,0 +1,73 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Test MediaSource behavior when receiving multiple seek requests during a pending seek.</title>
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.play();
+
+ // Append all media data for complete playback.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer end update.');
+ test.expectEvent(mediaElement, 'loadedmetadata', 'Reached HAVE_METADATA');
+ test.expectEvent(mediaElement, 'playing', 'Playing media.');
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ var bufferedRanges = mediaElement.buffered;
+
+ assert_greater_than_equal(mediaElement.duration, 4.0, 'Duration is >= 4.0s');
+ assert_equals(bufferedRanges.length, 1, 'Just one buffered range');
+ assert_less_than_equal(bufferedRanges.start(0), 1.0, 'Buffered range starts <= 1.0s');
+ assert_greater_than_equal(bufferedRanges.end(0), 4.0, 'Buffered range ends >= 4.0s');
+
+ test.expectEvent(mediaElement, 'seeking', 'seeking');
+ test.expectEvent(mediaElement, 'timeupdate', 'timeupdate');
+ test.expectEvent(mediaElement, 'seeked', 'seeked');
+
+ // Request seeks.
+ mediaElement.currentTime = 1.0;
+
+ // This 'ephemeral' seek should be invisible to JavaScript, apart from any latency incurred in processing it.
+ mediaElement.currentTime = 3.0;
+
+ mediaElement.currentTime = 1.0;
+
+ assert_true(mediaElement.seeking, 'Element is seeking');
+ assert_equals(mediaElement.currentTime, 1.0, 'Element time is at last seek time');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ // No more seeking or seeked events should occur.
+ mediaElement.addEventListener('seeking', test.unreached_func("Unexpected event 'seeking'"));
+ mediaElement.addEventListener('seeked', test.unreached_func("Unexpected event 'seeked'"));
+
+ assert_false(mediaElement.seeking, 'Element is not seeking');
+ assert_greater_than_equal(mediaElement.currentTime, 1.0, 'Element time is at or after last seek time');
+ assert_less_than(mediaElement.currentTime, 3.0, 'Element time is before the ephemeral seek time');
+
+ var timeBeforeWait = mediaElement.currentTime;
+ test.waitForCurrentTimeChange(mediaElement, function()
+ {
+ // Time should have advanced a little, but not yet reached the ephemeral seek time.
+ assert_greater_than(mediaElement.currentTime, timeBeforeWait, 'Element time has increased');
+ assert_less_than(mediaElement.currentTime, 3.0, 'Element time is still before the ephemeral seek time');
+ test.done();
+ });
+ });
+ }, 'Test redundant fully prebuffered seek');
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-remove.html b/testing/web-platform/tests/media-source/mediasource-remove.html
new file mode 100644
index 0000000000..6fea5a3e2e
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-remove.html
@@ -0,0 +1,324 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <title>SourceBuffer.remove() test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ assert_throws_js(TypeError, function()
+ {
+ sourceBuffer.remove(-1, 2);
+ }, "remove");
+
+ test.done();
+ }, "Test remove with an negative start.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ [ undefined, NaN, Infinity, -Infinity ].forEach(function(item)
+ {
+ assert_throws_js(TypeError, function()
+ {
+ sourceBuffer.remove(item, 2);
+ }, "remove");
+ });
+
+ test.done();
+ }, "Test remove with non-finite start.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ assert_throws_js(TypeError, function()
+ {
+ sourceBuffer.remove(11, 12);
+ }, "remove");
+
+ test.done();
+ }, "Test remove with a start beyond the duration.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ assert_throws_js(TypeError, function()
+ {
+ sourceBuffer.remove(2, 1);
+ }, "remove");
+
+ test.done();
+ }, "Test remove with a start larger than the end.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ assert_throws_js(TypeError, function()
+ {
+ sourceBuffer.remove(0, Number.NEGATIVE_INFINITY);
+ }, "remove");
+
+ test.done();
+ }, "Test remove with a NEGATIVE_INFINITY end.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ assert_throws_js(TypeError, function()
+ {
+ sourceBuffer.remove(0, Number.NaN);
+ }, "remove");
+
+ test.done();
+ }, "Test remove with a NaN end.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ mediaSource.removeSourceBuffer(sourceBuffer);
+
+ assert_throws_dom("InvalidStateError", function()
+ {
+ sourceBuffer.remove(1, 2);
+ }, "remove");
+
+ test.done();
+ }, "Test remove after SourceBuffer removed from mediaSource.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ assert_false(sourceBuffer.updating, "updating is false");
+ assert_equals(mediaSource.duration, NaN, "duration isn't set");
+
+ assert_throws_js(TypeError, function()
+ {
+ sourceBuffer.remove(0, 0);
+ }, "remove");
+
+ test.done();
+ }, "Test remove with a NaN duration.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.remove(1, 2);
+
+ assert_true(sourceBuffer.updating, "updating");
+
+ assert_throws_dom("InvalidStateError", function()
+ {
+ sourceBuffer.remove(3, 4);
+ }, "remove");
+
+ test.waitForExpectedEvents(function()
+ {
+ test.done();
+ });
+ }, "Test remove while update pending.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+
+ mediaSource.duration = 10;
+
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.remove(1, 2);
+
+ assert_true(sourceBuffer.updating, "updating");
+
+ assert_throws_dom("InvalidStateError", function()
+ {
+ sourceBuffer.abort();
+ }, "abort");
+
+ assert_true(sourceBuffer.updating, "updating");
+
+ test.waitForExpectedEvents(function()
+ {
+ test.done();
+ });
+ }, "Test aborting a remove operation.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_less_than(mediaSource.duration, 10);
+
+ mediaSource.duration = 10;
+
+ sourceBuffer.remove(mediaSource.duration, mediaSource.duration + 2);
+
+ assert_true(sourceBuffer.updating, "updating");
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ test.done();
+ });
+
+ }, "Test remove with a start at the duration.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.endOfStream();
+
+ assert_equals(mediaSource.readyState, "ended");
+
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ test.expectEvent(mediaSource, "sourceopen");
+ sourceBuffer.remove(1, 2);
+
+ assert_true(sourceBuffer.updating, "updating");
+ assert_equals(mediaSource.readyState, "open");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating");
+ test.done();
+ });
+ }, "Test remove transitioning readyState from 'ended' to 'open'.");
+
+ function removeAppendedDataTests(callback, description)
+ {
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.endOfStream();
+ assert_false(sourceBuffer.updating, "updating");
+
+ var start = Math.max(segmentInfo.media[0].timev, segmentInfo.media[0].timea).toFixed(3);
+ var duration = mediaElement.duration.toFixed(3);
+ var subType = MediaSourceUtil.getSubType(segmentInfo.type);
+
+ assertBufferedEquals(sourceBuffer, "{ [" + start + ", " + duration + ") }", "Initial buffered range.");
+ callback(test, mediaSource, sourceBuffer, duration, subType, segmentInfo);
+ });
+ }, description);
+ };
+ function removeAndCheckBufferedRanges(test, mediaSource, sourceBuffer, start, end, expected)
+ {
+ test.expectEvent(sourceBuffer, "updatestart");
+ test.expectEvent(sourceBuffer, "update");
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.remove(start, end);
+
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.endOfStream();
+ assert_false(sourceBuffer.updating, "updating");
+
+ assertBufferedEquals(sourceBuffer, expected, "Buffered ranges after remove().");
+ test.done();
+ });
+ }
+
+ removeAppendedDataTests(function(test, mediaSource, sourceBuffer, duration, subType, segmentInfo)
+ {
+ removeAndCheckBufferedRanges(test, mediaSource, sourceBuffer, 0, Number.POSITIVE_INFINITY, "{ }");
+ }, "Test removing all appended data.");
+
+ removeAppendedDataTests(function(test, mediaSource, sourceBuffer, duration, subType, segmentInfo)
+ {
+ var expectations = {
+ webm: ("{ [3.315, " + duration + ") }"),
+ mp4: ("{ [3.298, " + duration + ") }"),
+ };
+
+ // Note: Range doesn't start exactly at the end of the remove range because there isn't
+ // a keyframe there. The resulting range starts at the first keyframe >= the end time.
+ removeAndCheckBufferedRanges(test, mediaSource, sourceBuffer, 0, 3, expectations[subType]);
+ }, "Test removing beginning of appended data.");
+
+ removeAppendedDataTests(function(test, mediaSource, sourceBuffer, duration, subType, segmentInfo)
+ {
+ var start = Math.max(segmentInfo.media[0].timev, segmentInfo.media[0].timea).toFixed(3);
+ var expectations = {
+ webm: ("{ [" + start + ", 1.005) [3.315, " + duration + ") }"),
+ mp4: ("{ [" + start + ", 0.997) [3.298, " + duration + ") }"),
+ };
+
+ // Note: The first resulting range ends slightly after start because the removal algorithm only removes
+ // frames with a timestamp >= the start time. If a frame starts before and ends after the remove() start
+ // timestamp, then it stays in the buffer.
+ removeAndCheckBufferedRanges(test, mediaSource, sourceBuffer, 1, 3, expectations[subType]);
+ }, "Test removing the middle of appended data.");
+
+ removeAppendedDataTests(function(test, mediaSource, sourceBuffer, duration, subType, segmentInfo)
+ {
+ var start = Math.max(segmentInfo.media[0].timev, segmentInfo.media[0].timea).toFixed(3);
+ var expectations = {
+ webm: "{ [" + start + ", 1.013) }",
+ mp4: "{ [" + start + ", 1.022) }",
+ };
+
+ removeAndCheckBufferedRanges(test, mediaSource, sourceBuffer, 1, Number.POSITIVE_INFINITY, expectations[subType]);
+ }, "Test removing the end of appended data.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-removesourcebuffer.html b/testing/web-platform/tests/media-source/mediasource-removesourcebuffer.html
new file mode 100644
index 0000000000..30ec930cbe
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-removesourcebuffer.html
@@ -0,0 +1,146 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>MediaSource.removeSourceBuffer() test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_class_string(sourceBuffer, "SourceBuffer", "New SourceBuffer returned");
+
+ mediaSource.removeSourceBuffer(sourceBuffer);
+
+ var sourceBuffer2 = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_class_string(sourceBuffer2, "SourceBuffer", "New SourceBuffer returned");
+ assert_not_equals(sourceBuffer, sourceBuffer2, "SourceBuffers are different instances.");
+ assert_equals(mediaSource.sourceBuffers.length, 1, "sourceBuffers.length == 1");
+
+ test.done();
+ }, "Test addSourceBuffer(), removeSourceBuffer(), addSourceBuffer() sequence.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ assert_throws_js(TypeError,
+ function() { mediaSource.removeSourceBuffer(null); },
+ "removeSourceBuffer() threw an exception when passed null.");
+ test.done();
+ }, "Test removeSourceBuffer() with null");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_class_string(sourceBuffer, "SourceBuffer", "New SourceBuffer returned");
+
+ mediaSource.removeSourceBuffer(sourceBuffer);
+
+ assert_throws_dom("NotFoundError",
+ function() { mediaSource.removeSourceBuffer(sourceBuffer); },
+ "removeSourceBuffer() threw an exception for a SourceBuffer that was already removed.");
+
+ test.done();
+ }, "Test calling removeSourceBuffer() twice with the same object.");
+
+ mediasource_test(function(test, mediaElement1, mediaSource1)
+ {
+ var sourceBuffer1 = mediaSource1.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+ assert_class_string(sourceBuffer1, "SourceBuffer", "New SourceBuffer returned");
+
+ var mediaElement2 = document.createElement("video");
+ document.body.appendChild(mediaElement2);
+ test.add_cleanup(function() { document.body.removeChild(mediaElement2); });
+
+ var mediaSource2 = new MediaSource();
+ var mediaSource2URL = URL.createObjectURL(mediaSource2);
+ mediaElement2.src = mediaSource2URL;
+ test.expectEvent(mediaSource2, "sourceopen", "Second MediaSource opened");
+ test.waitForExpectedEvents(function()
+ {
+ URL.revokeObjectURL(mediaSource2URL);
+
+ var sourceBuffer2 = mediaSource2.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE);
+ assert_class_string(sourceBuffer2, "SourceBuffer", "Second new SourceBuffer returned");
+ assert_not_equals(mediaSource1, mediaSource2, "MediaSources are different instances");
+ assert_not_equals(sourceBuffer1, sourceBuffer2, "SourceBuffers are different instances");
+ assert_equals(mediaSource1.sourceBuffers[0], sourceBuffer1);
+ assert_equals(mediaSource2.sourceBuffers[0], sourceBuffer2);
+ assert_throws_dom("NotFoundError",
+ function() { mediaSource1.removeSourceBuffer(sourceBuffer2); },
+ "MediaSource1.removeSourceBuffer() threw an exception for SourceBuffer2");
+ assert_throws_dom("NotFoundError",
+ function() { mediaSource2.removeSourceBuffer(sourceBuffer1); },
+ "MediaSource2.removeSourceBuffer() threw an exception for SourceBuffer1");
+ mediaSource1.removeSourceBuffer(sourceBuffer1);
+ mediaSource2.removeSourceBuffer(sourceBuffer2);
+ test.done();
+ });
+ }, "Test calling removeSourceBuffer() for a sourceBuffer belonging to a different mediaSource instance.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_class_string(sourceBuffer, "SourceBuffer", "New SourceBuffer returned");
+
+ mediaSource.endOfStream();
+ assert_equals(mediaSource.readyState, "ended", "MediaSource in ended state");
+ mediaSource.removeSourceBuffer(sourceBuffer);
+
+ assert_equals(mediaSource.sourceBuffers.length, 0, "MediaSource.sourceBuffers is empty");
+              assert_equals(mediaSource.activeSourceBuffers.length, 0, "MediaSource.activeSourceBuffers is empty");
+
+ test.done();
+ }, "Test calling removeSourceBuffer() in ended state.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ test.expectEvent(mediaElement, "loadedmetadata", "loadedmetadata done.");
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.sourceBuffers.length, 1, "MediaSource.sourceBuffers is not empty");
+                  assert_equals(mediaSource.activeSourceBuffers.length, 1, "MediaSource.activeSourceBuffers is not empty");
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_METADATA);
+ assert_equals(mediaSource.duration, segmentInfo.duration);
+ test.expectEvent(mediaSource.activeSourceBuffers, "removesourcebuffer", "SourceBuffer removed from activeSourceBuffers.");
+ test.expectEvent(mediaSource.sourceBuffers, "removesourcebuffer", "SourceBuffer removed.");
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.sourceBuffers.length, 0, "MediaSource.sourceBuffers is empty");
+                  assert_equals(mediaSource.activeSourceBuffers.length, 0, "MediaSource.activeSourceBuffers is empty");
+ test.done();
+ });
+ }, "Test removesourcebuffer event on activeSourceBuffers.");
+
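+          // Per the MediaSource.removeSourceBuffer() steps, removing a SourceBuffer whose updating
+          // flag is true first aborts the pending buffer append: updating is reset to false and
+          // 'abort' followed by 'updateend' fire on the SourceBuffer before it is removed.
+          // The next test exercises exactly that sequence.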
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ var mimetype = MediaSourceUtil.AUDIO_VIDEO_TYPE;
+ var sourceBuffer = mediaSource.addSourceBuffer(mimetype);
+ sourceBuffer.appendBuffer(new Uint8Array(0));
+ assert_true(sourceBuffer.updating, "Updating flag set when a buffer is appended.");
+ test.expectEvent(sourceBuffer, 'abort');
+ test.expectEvent(sourceBuffer, 'updateend');
+
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ assert_false(sourceBuffer.updating, "Updating flag reset after abort.");
+ test.waitForExpectedEvents(function()
+ {
+ test.done();
+ });
+ }, "Test abort event when removeSourceBuffer() called while SourceBuffer is updating");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-replay.html b/testing/web-platform/tests/media-source/mediasource-replay.html
new file mode 100644
index 0000000000..05a8c0a918
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-replay.html
@@ -0,0 +1,41 @@
+<!DOCTYPE html>
+<!-- Copyright © 2019 Igalia S.L -->
+<html>
+<head>
+ <title>MediaSource replay test case.</title>
+ <meta name="timeout" content="long">
+ <meta charset="utf-8">
+ <link rel="author" title="Alicia Boya García" href="mailto:aboya@igalia.com">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+ mediasource_testafterdataloaded(function (test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData) {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function () {
+ mediaSource.endOfStream();
+
+ // Start playing near the end.
+ mediaElement.currentTime = 6.2;
+ mediaElement.play();
+ test.expectEvent(mediaElement, 'ended', 'mediaElement');
+ });
+
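+        // Per the HTML media element play() steps, calling play() after playback has ended (with a
+        // forward playback direction) first seeks back to the earliest possible position, so
+        // currentTime is expected to be 0 here before playback resumes.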
+ test.waitForExpectedEvents(function () {
+ mediaElement.play();
+ assert_equals(mediaElement.currentTime, 0, "currentTime");
+ // If currentTime is able to advance, the player did not get stuck and it's a pass.
+ test.waitForCurrentTimeChange(mediaElement, test.step_func_done());
+ });
+ }, "Test replaying video after 'ended'");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-seek-beyond-duration.html b/testing/web-platform/tests/media-source/mediasource-seek-beyond-duration.html
new file mode 100644
index 0000000000..8b07c9f801
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-seek-beyond-duration.html
@@ -0,0 +1,105 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Test MediaSource behavior when seeking beyond the duration of the clip.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+
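+      // Per the HTML media element seek algorithm, a requested time later than the duration is
+      // clamped to the duration, so seeking at or beyond mediaSource.duration should leave
+      // currentTime equal to the duration, while earlier seek targets keep the requested value.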
+ function seekToSpecifiedTimeSetEOSAndVerifyDone(test, mediaElement, mediaSource, seekToTime)
+ {
+ assert_less_than(mediaElement.currentTime, mediaElement.duration, 'Not at the end yet.');
+ test.expectEvent(mediaElement, 'seeking', 'mediaElement seeking');
+ // Seek to specified time.
+ mediaElement.currentTime = seekToTime;
+ if (seekToTime >= mediaSource.duration) {
+ assert_equals(mediaElement.currentTime, mediaSource.duration, 'Current time equals duration.');
+ } else {
+ assert_equals(mediaElement.currentTime, seekToTime, 'Current time equals specified seek time.');
+ }
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(mediaElement, 'timeupdate', 'mediaElement time updated.');
+ test.expectEvent(mediaElement, 'seeked', 'mediaElement seeked');
+ test.expectEvent(mediaElement, 'ended', 'mediaElement ended.');
+ test.expectEvent(mediaSource, 'sourceended', 'mediaSource ended.');
+ mediaSource.endOfStream();
+ assert_true(mediaElement.seeking, 'mediaElement seeking.');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.currentTime, mediaSource.duration, 'Current time equals duration.');
+ test.done();
+ });
+      }
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.play();
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+
+ // Append the initialization segment to trigger a transition to HAVE_METADATA.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer end update.');
+ test.expectEvent(mediaElement, 'loadedmetadata', 'Reached HAVE_METADATA');
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ // Add sufficient segments to have at least 2s of play-time.
+ var playbackData = MediaSourceUtil.getMediaDataForPlaybackTime(mediaData, segmentInfo, 2.0);
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.expectEvent(mediaElement, 'playing', 'Playing media.');
+ sourceBuffer.appendBuffer(playbackData);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.duration, segmentInfo.duration);
+ assert_greater_than_equal(mediaElement.duration, 2.0, 'Duration is >2.0s.');
+
+ test.expectEvent(sourceBuffer, "updateend");
+ sourceBuffer.remove(1.5, Infinity);
+ assert_true(sourceBuffer.updating, "updating");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating");
+ test.waitForCurrentTimeChange(mediaElement, function()
+ {
+ // Update duration.
+ mediaSource.duration = 1.5;
+ seekToSpecifiedTimeSetEOSAndVerifyDone(test, mediaElement, mediaSource, 1.8);
+ });
+ });
+ }, 'Test seeking beyond updated media duration.');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.play();
+
+ // Append all media data for complete playback.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer end update.');
+ test.expectEvent(mediaElement, 'loadedmetadata', 'Reached HAVE_METADATA');
+ test.expectEvent(mediaElement, 'playing', 'Playing media.');
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ test.waitForCurrentTimeChange(mediaElement, function()
+ {
+                    seekToSpecifiedTimeSetEOSAndVerifyDone(test, mediaElement, mediaSource, mediaSource.duration + 0.1);
+ });
+ });
+
+ }, 'Test seeking beyond media duration.');
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-seek-during-pending-seek.html b/testing/web-platform/tests/media-source/mediasource-seek-during-pending-seek.html
new file mode 100644
index 0000000000..60c5eec1c7
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-seek-during-pending-seek.html
@@ -0,0 +1,189 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>Test MediaSource behavior when a seek is requested while another seek is pending.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.play();
+
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var firstSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+ var segmentIndex = 2;
+ var secondSegmentInfo = segmentInfo.media[segmentIndex];
+
+ // Append the initialization segment to trigger a transition to HAVE_METADATA.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.expectEvent(mediaElement, 'loadedmetadata', 'Reached HAVE_METADATA');
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(mediaElement.seeking, 'mediaElement is not seeking');
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_METADATA, 'Still in HAVE_METADATA');
+
+              // Seek to a new position before letting the initial seek to 0 complete.
+ test.expectEvent(mediaElement, 'seeking', 'mediaElement');
+ mediaElement.currentTime = Math.max(secondSegmentInfo.timev, secondSegmentInfo.timea);
+ assert_true(mediaElement.seeking, 'mediaElement is seeking');
+
+ // Append media data for time 0.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ sourceBuffer.appendBuffer(firstSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ // Verify that the media data didn't trigger a 'seeking' event or a transition beyond HAVE_METADATA.
+ assert_true(mediaElement.seeking, 'mediaElement is still seeking');
+ assert_equals(mediaElement.readyState, mediaElement.HAVE_METADATA, 'Still in HAVE_METADATA');
+
+ // Append media data for the current position until the element starts playing.
+ test.expectEvent(mediaElement, 'seeked', 'mediaElement finished seek');
+ test.expectEvent(mediaElement, 'playing', 'mediaElement playing');
+
+ MediaSourceUtil.appendUntilEventFires(test, mediaElement, 'playing', sourceBuffer, mediaData, segmentInfo, segmentIndex);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ if (sourceBuffer.updating)
+ {
+ // The event playing was fired prior to the appendBuffer completing.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.waitForExpectedEvents(function()
+ {
+                      assert_false(sourceBuffer.updating, 'append has completed');
+ test.expectEvent(mediaSource, 'sourceended', 'mediaSource ended');
+ mediaSource.endOfStream();
+ });
+ }
+ else
+ {
+ test.expectEvent(mediaSource, 'sourceended', 'mediaSource ended');
+ mediaSource.endOfStream();
+ }
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+              // Note: the seek just completed, but there is less than a second's worth of data to
+              // play, so playback may already have reached the end by the time this callback runs.
+ if (!mediaElement.paused)
+ {
+ assert_greater_than_equal(mediaElement.readyState, mediaElement.HAVE_CURRENT_DATA, 'Greater or equal than HAVE_CURRENT_DATA');
+ }
+ else
+ {
+ assert_true(mediaElement.ended);
+ }
+ test.done();
+ });
+
+ }, 'Test seeking to a new location before transitioning beyond HAVE_METADATA.');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaElement.play();
+
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var firstSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+ var secondSegmentInfo = segmentInfo.media[2];
+ var secondSegment = MediaSourceUtil.extractSegmentData(mediaData, secondSegmentInfo);
+ var segmentIndex = 4;
+ var thirdSegmentInfo = segmentInfo.media[segmentIndex];
+
+ // Append the initialization segment to trigger a transition to HAVE_METADATA.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.expectEvent(mediaElement, 'loadedmetadata', 'Reached HAVE_METADATA');
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.expectEvent(mediaElement, 'playing', 'mediaElement playing');
+ sourceBuffer.appendBuffer(firstSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_greater_than(mediaElement.readyState, mediaElement.HAVE_CURRENT_DATA, 'Greater than HAVE_CURRENT_DATA');
+
+ // Seek to a new position.
+ test.expectEvent(mediaElement, 'seeking', 'mediaElement');
+ mediaElement.currentTime = Math.max(secondSegmentInfo.timev, secondSegmentInfo.timea);
+ assert_true(mediaElement.seeking, 'mediaElement is seeking');
+
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_true(mediaElement.seeking, 'mediaElement is still seeking');
+
+ // Seek to a second position while the first seek is still pending.
+ test.expectEvent(mediaElement, 'seeking', 'mediaElement');
+ mediaElement.currentTime = Math.max(thirdSegmentInfo.timev, thirdSegmentInfo.timea);
+ assert_true(mediaElement.seeking, 'mediaElement is seeking');
+
+ // Append media data for the first seek position.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ sourceBuffer.appendBuffer(secondSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ // Note that we can't assume that the element is still seeking
+ // when the seeking event is fired as the operation is asynchronous.
+
+ // Append media data for the second seek position.
+ test.expectEvent(mediaElement, 'seeked', 'mediaElement finished seek');
+ MediaSourceUtil.appendUntilEventFires(test, mediaElement, 'seeked', sourceBuffer, mediaData, segmentInfo, segmentIndex);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(mediaElement.seeking, 'mediaElement is no longer seeking');
+
+ if (sourceBuffer.updating)
+ {
+ // The event seeked was fired prior to the appendBuffer completing.
+ test.expectEvent(sourceBuffer, 'updateend', 'sourceBuffer');
+ test.waitForExpectedEvents(function()
+ {
+                        assert_false(sourceBuffer.updating, 'append has completed');
+ test.expectEvent(mediaSource, 'sourceended', 'mediaSource ended');
+ mediaSource.endOfStream();
+ });
+ }
+ else
+ {
+ test.expectEvent(mediaSource, 'sourceended', 'mediaSource ended');
+ mediaSource.endOfStream();
+ }
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+              // Note: the seek just completed, but there is less than a second's worth of data to
+              // play, so playback may already have reached the end by the time this callback runs.
+ if (!mediaElement.paused)
+ {
+ assert_greater_than_equal(mediaElement.readyState, mediaElement.HAVE_CURRENT_DATA, 'Greater or equal than HAVE_CURRENT_DATA');
+ }
+ else
+ {
+ assert_true(mediaElement.ended);
+ }
+ test.done();
+ });
+ }, 'Test seeking to a new location during a pending seek.');
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-seekable.html b/testing/web-platform/tests/media-source/mediasource-seekable.html
new file mode 100644
index 0000000000..8e228d3466
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-seekable.html
@@ -0,0 +1,67 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+ mediaElement.addEventListener('ended', test.step_func_done(function () {}));
+
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ assertSeekableEquals(mediaElement, '{ }', 'mediaElement.seekable');
+ test.done();
+ }, 'Get seekable time ranges when the sourcebuffer is empty.');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ test.expectEvent(mediaElement, 'durationchange', 'mediaElement got duration');
+ sourceBuffer.appendBuffer(initSegment);
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.duration, segmentInfo.duration);
+ assertSeekableEquals(mediaElement, '{ [0.000, ' + segmentInfo.duration.toFixed(3) + ') }', 'mediaElement.seekable');
+ test.done();
+ });
+ }, 'Get seekable time ranges after init segment received.');
+
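+    // With duration set to Infinity (a "live" presentation) and no live seekable range configured,
+    // the MSE extension to HTMLMediaElement.seekable reports an empty TimeRanges until data is
+    // buffered, and afterwards a single range from 0 to the highest buffered end time. That is why
+    // the next test expects seekable.start(0) == 0 even though buffered.start(0) is non-zero.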
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ test.expectEvent(mediaElement, 'durationchange', 'mediaElement got duration after initsegment');
+ test.expectEvent(sourceBuffer, 'update');
+ test.expectEvent(sourceBuffer, 'updateend');
+ sourceBuffer.appendBuffer(initSegment);
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "updating attribute is false");
+ test.expectEvent(mediaElement, 'durationchange', 'mediaElement got infinity duration');
+ mediaSource.duration = Infinity;
+ test.waitForExpectedEvents(function()
+ {
+ assertSeekableEquals(mediaElement, '{ }', 'mediaElement.seekable');
+
+                // Append a segment from the middle of the stream to make sure that seekable does not
+                // use buffered.start(0) or duration as its first or last value.
+ var midSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[2]);
+ test.expectEvent(sourceBuffer, 'update');
+ test.expectEvent(sourceBuffer, 'updateend');
+ sourceBuffer.appendBuffer(midSegment);
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaElement.seekable.length, 1, 'mediaElement.seekable.length');
+ assert_equals(mediaElement.buffered.length, 1, 'mediaElement.buffered.length');
+ assert_not_equals(mediaElement.seekable.start(0), mediaElement.buffered.start(0));
+ assert_equals(mediaElement.seekable.start(0), 0);
+ assert_not_equals(mediaElement.seekable.end(0), mediaElement.duration);
+ assert_not_equals(mediaElement.seekable.end(0), mediaElement.buffered.start(0));
+ assert_equals(mediaElement.seekable.end(0), mediaElement.buffered.end(0));
+ test.done();
+ });
+ });
+ });
+ }, 'Get seekable time ranges on an infinite stream.');
+</script>
diff --git a/testing/web-platform/tests/media-source/mediasource-sequencemode-append-buffer.html b/testing/web-platform/tests/media-source/mediasource-sequencemode-append-buffer.html
new file mode 100644
index 0000000000..46008eeb25
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-sequencemode-append-buffer.html
@@ -0,0 +1,137 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>SourceBuffer.mode == "sequence" test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ function mediasource_sequencemode_test(testFunction, description, options)
+ {
+ return mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_greater_than(segmentInfo.media.length, 3, "at least 3 media segments for supported type");
+ mediaElement.addEventListener("error", test.unreached_func("Unexpected event 'error'"));
+ sourceBuffer.mode = "sequence";
+ assert_equals(sourceBuffer.mode, "sequence", "mode after setting it to \"sequence\"");
+
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ test.expectEvent(sourceBuffer, "updatestart", "initSegment append started.");
+ test.expectEvent(sourceBuffer, "update", "initSegment append success.");
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ sourceBuffer.appendBuffer(initSegment);
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(sourceBuffer.timestampOffset, 0, "timestampOffset initially 0");
+ testFunction(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData);
+ });
+ }, description, options);
+ }
+
+ function append_segment(test, sourceBuffer, mediaData, info, callback)
+ {
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, info);
+ test.expectEvent(sourceBuffer, "updatestart", "media segment append started.");
+ test.expectEvent(sourceBuffer, "update", "media segment append success.");
+ test.expectEvent(sourceBuffer, "updateend", "media segment append ended.");
+ sourceBuffer.appendBuffer(mediaSegment);
+ test.waitForExpectedEvents(callback);
+ }
+
+ // Verifies expected times to 3 decimal places before and after mediaSource.endOfStream(),
+ // and calls |callback| on success.
+ function verify_offset_and_buffered(test, mediaSource, sourceBuffer,
+ expectedTimestampOffset, expectedBufferedRangeStartTime,
+ expectedBufferedRangeMaxEndTimeBeforeEOS,
+ expectedBufferedRangeEndTimeAfterEOS,
+ callback) {
+ assert_approx_equals(sourceBuffer.timestampOffset,
+ expectedTimestampOffset,
+ 0.001,
+ "expectedTimestampOffset");
+
+ // Prior to EOS, the buffered range end time may not have fully reached the next media
+ // segment's timecode (adjusted by any timestampOffset). It should not exceed it though.
+ // Therefore, an exact assertBufferedEquals() will not work here.
+ assert_greater_than(sourceBuffer.buffered.length, 0, "sourceBuffer.buffered has at least 1 range before EOS");
+ assert_approx_equals(sourceBuffer.buffered.start(0),
+ expectedBufferedRangeStartTime,
+ 0.001,
+ "sourceBuffer.buffered range begins where expected before EOS");
+ assert_less_than_equal(sourceBuffer.buffered.end(0),
+ expectedBufferedRangeMaxEndTimeBeforeEOS + 0.001,
+ "sourceBuffer.buffered range ends at or before expected upper bound before EOS");
+
+ test.expectEvent(mediaSource, "sourceended", "mediaSource endOfStream");
+ mediaSource.endOfStream();
+ test.waitForExpectedEvents(function()
+ {
+ assertBufferedEquals(sourceBuffer,
+ "{ [" + expectedBufferedRangeStartTime.toFixed(3) + ", " + expectedBufferedRangeEndTimeAfterEOS.toFixed(3) + ") }",
+ "sourceBuffer.buffered after EOS");
+ callback();
+ });
+ }
+
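+          // In "sequence" mode the coded frame processing algorithm adjusts timestampOffset so that
+          // each appended media segment starts at the current group end timestamp (0 for the first
+          // append). The segment's earliest presentation timestamp is therefore subtracted out, which
+          // is why the tests below expect timestampOffset to equal -offset for the first segment.
+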
+ mediasource_sequencemode_test(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var offset = Math.min(segmentInfo.media[0].timev, segmentInfo.media[0].timea);
+ var expectedStart = Math.max(segmentInfo.media[0].timev, segmentInfo.media[0].timea) - offset;
+ var expectedEnd = Math.min(segmentInfo.media[0].endtimev, segmentInfo.media[0].endtimea) - offset;
+ var expectedEndEOS = Math.max(segmentInfo.media[0].endtimev, segmentInfo.media[0].endtimea) - offset;
+ append_segment(test, sourceBuffer, mediaData, segmentInfo.media[0], function()
+ {
+ verify_offset_and_buffered(test, mediaSource, sourceBuffer,
+ -offset, expectedStart,
+ expectedEnd, expectedEndEOS,
+ test.done);
+ });
+ }, "Test sequence AppendMode appendBuffer(first media segment)");
+
+ mediasource_sequencemode_test(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var offset = Math.min(segmentInfo.media[1].timev, segmentInfo.media[1].timea);
+ var expectedStart = Math.max(segmentInfo.media[1].timev, segmentInfo.media[1].timea) - offset;
+ var expectedEnd = Math.min(segmentInfo.media[1].endtimev, segmentInfo.media[1].endtimea) - offset;
+ var expectedEndEOS = Math.max(segmentInfo.media[1].endtimev, segmentInfo.media[1].endtimea) - offset;
+ assert_greater_than(Math.min(segmentInfo.media[1].timev, segmentInfo.media[1].timea), 0,
+ "segment starts after time 0");
+ append_segment(test, sourceBuffer, mediaData, segmentInfo.media[1], function()
+ {
+ verify_offset_and_buffered(test, mediaSource, sourceBuffer,
+ -offset, expectedStart,
+ expectedEnd, expectedEndEOS,
+ test.done);
+ });
+ }, "Test sequence AppendMode appendBuffer(second media segment)");
+
+ mediasource_sequencemode_test(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_greater_than(Math.min(segmentInfo.media[1].timev, segmentInfo.media[1].timea), 0,
+ "segment starts after time 0");
+ append_segment(test, sourceBuffer, mediaData, segmentInfo.media[1], function()
+ {
+ append_segment(test, sourceBuffer, mediaData, segmentInfo.media[0], function()
+ {
+ var firstOffset = Math.min(segmentInfo.media[1].timev, segmentInfo.media[1].timea);
+ var secondOffset = Math.max(segmentInfo.media[1].endtimev, segmentInfo.media[1].endtimea) - firstOffset;
+ var expectedStart = Math.max(segmentInfo.media[1].timev, segmentInfo.media[1].timea) - firstOffset;
+ var expectedEnd = Math.min(segmentInfo.media[0].endtimev, segmentInfo.media[0].endtimea) + secondOffset;
+ var expectedEndEOS = Math.max(segmentInfo.media[0].endtimev, segmentInfo.media[0].endtimea) + secondOffset;
+ // Current timestampOffset should reflect offset required to put media[0]
+ // immediately after media[1]'s highest frame end timestamp (as was adjusted
+ // by an earlier timestampOffset).
+ verify_offset_and_buffered(test, mediaSource, sourceBuffer,
+ secondOffset, expectedStart,
+ expectedEnd, expectedEndEOS,
+ test.done);
+ })
+ });
+ }, "Test sequence AppendMode appendBuffer(second media segment, then first media segment)");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-sourcebuffer-mode-timestamps.html b/testing/web-platform/tests/media-source/mediasource-sourcebuffer-mode-timestamps.html
new file mode 100644
index 0000000000..e7e9b8ca5f
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-sourcebuffer-mode-timestamps.html
@@ -0,0 +1,52 @@
+<!doctype html>
+<html>
+<head>
+ <meta charset='utf-8'>
+ <title>SourceBuffer#mode with generate timestamps flag true</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+var mimes = ['audio/aac', 'audio/mpeg'];
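+// These MIME types map to the ADTS and MPEG audio byte stream formats, whose MSE registrations set
+// the "generate timestamps flag" to true because the frames carry no in-band presentation
+// timestamps; that flag is what makes assigning mode = "segments" invalid for such SourceBuffers.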
+
+// Check whether the browser supports the MIME type used in this test.
+function isTypeSupported(mime) {
+ if(!MediaSource.isTypeSupported(mime)) {
+ this.step(function() {
+ assert_unreached("Browser doesn't support the MIME used in this test: " + mime);
+ });
+ this.done();
+ return false;
+ }
+ return true;
+}
+function mediaTest(mime) {
+ async_test(function(t) {
+ if(!isTypeSupported.bind(t)(mime)) {
+ return;
+ }
+ var mediaSource = new MediaSource();
+ mediaSource.addEventListener('sourceopen', t.step_func_done(function(e) {
+ var sourceBuffer = mediaSource.addSourceBuffer(mime);
+ assert_equals(sourceBuffer.updating, false, "SourceBuffer.updating is false");
+ assert_throws_js(TypeError,
+ function() {
+ sourceBuffer.mode = "segments";
+ },
+ 'SourceBuffer#mode with generate timestamps flag true');
+ }), false);
+ var video = document.createElement('video');
+ video.src = window.URL.createObjectURL(mediaSource);
+ }, mime + ' : ' +
+ 'If generate timestamps flag equals true and new mode equals "segments", ' +
+ 'then throw a TypeError exception and abort these steps.');
+}
+mimes.forEach(function(mime) {
+ mediaTest(mime);
+});
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-sourcebuffer-mode.html b/testing/web-platform/tests/media-source/mediasource-sourcebuffer-mode.html
new file mode 100644
index 0000000000..2d84fa9753
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-sourcebuffer-mode.html
@@ -0,0 +1,142 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <title>SourceBuffer.mode test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_equals(sourceBuffer.mode, 'segments', 'default append mode should be \'segments\'');
+ test.done();
+ }, 'Test initial value of SourceBuffer.mode is "segments"');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ sourceBuffer.mode = 'sequence';
+ assert_equals(sourceBuffer.mode, 'sequence', 'mode after setting it to \'sequence\'');
+
+              // Assigning a value that is not in the AppendMode IDL enum should be ignored and must
+              // not throw an exception, per WebIDL's handling of enumeration-typed attributes.
+ sourceBuffer.mode = 'invalidmode';
+ sourceBuffer.mode = null;
+ sourceBuffer.mode = '';
+ sourceBuffer.mode = 'Segments';
+ assert_equals(sourceBuffer.mode, 'sequence', 'mode unchanged by attempts to set invalid modes');
+
+ sourceBuffer.mode = 'segments';
+ assert_equals(sourceBuffer.mode, 'segments', 'mode after setting it to \'segments\'');
+ test.done();
+ }, 'Test setting SourceBuffer.mode');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ assert_throws_dom('InvalidStateError',
+ function() { sourceBuffer.mode = 'segments'; },
+ 'Setting valid sourceBuffer.mode on removed SourceBuffer should throw InvalidStateError.');
+ test.done();
+ }, 'Test setting a removed SourceBuffer\'s mode');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ sourceBuffer.appendBuffer(mediaData);
+ assert_true(sourceBuffer.updating, 'updating attribute is true');
+ assert_throws_dom('InvalidStateError',
+ function() { sourceBuffer.mode = 'segments'; },
+ 'Setting valid sourceBuffer.mode on updating SourceBuffer threw InvalidStateError.');
+ test.done();
+ }, 'Test setting SourceBuffer.mode while still updating');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ test.expectEvent(sourceBuffer, 'updateend', 'Append ended.');
+ sourceBuffer.appendBuffer(mediaData);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, 'updating attribute is false');
+
+ test.expectEvent(mediaSource, 'sourceended', 'MediaSource sourceended event');
+ mediaSource.endOfStream();
+ assert_equals(mediaSource.readyState, 'ended', 'MediaSource readyState is \'ended\'');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, 'ended', 'MediaSource readyState is \'ended\'');
+
+ test.expectEvent(mediaSource, 'sourceopen', 'MediaSource sourceopen event');
+ sourceBuffer.mode = 'segments';
+
+ assert_equals(mediaSource.readyState, 'open', 'MediaSource readyState is \'open\'');
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, 'open', 'MediaSource readyState is \'open\'');
+ test.done();
+ });
+ }, 'Test setting SourceBuffer.mode triggers parent MediaSource \'ended\' to \'open\' transition.');
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var fullMediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+ var truncateAt = Math.floor(segmentInfo.media[0].size * 0.5); // Pick first 50% of segment bytes.
+ var partialMediaSegment = fullMediaSegment.subarray(0, truncateAt);
+ var mediaSegmentRemainder = fullMediaSegment.subarray(truncateAt);
+
+ // Append init segment.
+ test.expectEvent(sourceBuffer, 'updateend', 'Init segment append ended.');
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, 'updating attribute is false');
+ assert_equals(sourceBuffer.mode, 'segments');
+ sourceBuffer.mode = 'segments'; // No exception should occur.
+ assert_equals(sourceBuffer.timestampOffset, 0.0);
+ sourceBuffer.timestampOffset = 10.123456789; // No exception should occur.
+ assert_equals(sourceBuffer.timestampOffset, 10.123456789); // Super-precise offsets should round-trip.
+
+ // Append first part of media segment.
+ test.expectEvent(sourceBuffer, 'updateend', 'Partial media segment append ended.');
+ sourceBuffer.appendBuffer(partialMediaSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, 'updating attribute is false');
+ assert_equals(sourceBuffer.mode, 'segments');
+ assert_throws_dom('InvalidStateError',
+ function() { sourceBuffer.mode = 'segments'; },
+ 'Setting valid sourceBuffer.mode while still parsing media segment threw InvalidStateError.');
+ assert_equals(sourceBuffer.timestampOffset, 10.123456789);
+ assert_throws_dom('InvalidStateError',
+ function() { sourceBuffer.timestampOffset = 20.0; },
+ 'Setting valid sourceBuffer.timestampOffset while still parsing media segment threw InvalidStateError.');
+
+ // Append remainder of media segment.
+ test.expectEvent(sourceBuffer, 'updateend', 'Append ended of remainder of media segment.');
+ sourceBuffer.appendBuffer(mediaSegmentRemainder);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, 'updating attribute is false');
+ assert_equals(sourceBuffer.mode, 'segments');
+ sourceBuffer.mode = 'segments'; // No exception should occur.
+ assert_equals(sourceBuffer.timestampOffset, 10.123456789);
+ sourceBuffer.timestampOffset = 20.0; // No exception should occur.
+ test.done();
+ });
+ }, 'Test setting SourceBuffer.mode and SourceBuffer.timestampOffset while parsing media segment.');
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-sourcebuffer-trackdefaults.html b/testing/web-platform/tests/media-source/mediasource-sourcebuffer-trackdefaults.html
new file mode 100644
index 0000000000..7b45486d71
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-sourcebuffer-trackdefaults.html
@@ -0,0 +1,78 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="mediasource-util.js"></script>
+<script>
+ function sourceBufferTrackDefaultsTest(callback, description)
+ {
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_array_equals(sourceBuffer.trackDefaults, [], "Empty initial SourceBuffer.trackDefaults");
+ callback(test, mediaElement, mediaSource, sourceBuffer);
+ }, description);
+    }
+
+ sourceBufferTrackDefaultsTest(function(test, mediaElement, mediaSource, sourceBuffer)
+ {
+ var emptyList = new TrackDefaultList([]);
+ assert_not_equals(sourceBuffer.trackDefaults, emptyList, "Initial trackDefaults object differs from new empty list");
+
+ sourceBuffer.trackDefaults = emptyList;
+
+ assert_array_equals(sourceBuffer.trackDefaults, [], "Round-tripped empty trackDefaults");
+ assert_equals(sourceBuffer.trackDefaults, emptyList, "Round-tripped the empty TrackDefaultList object");
+ test.done();
+ }, "Test round-trip of empty SourceBuffer.trackDefaults");
+
+ sourceBufferTrackDefaultsTest(function(test, mediaElement, mediaSource, sourceBuffer)
+ {
+ var trackDefault = new TrackDefault("audio", "en-US", "audio label", ["main"], "1");
+ var trackDefaults = new TrackDefaultList([ trackDefault ]);
+
+ sourceBuffer.trackDefaults = trackDefaults;
+
+ assert_array_equals(sourceBuffer.trackDefaults, trackDefaults, "Round-tripped non-empty trackDefaults");
+ assert_equals(sourceBuffer.trackDefaults.length, 1, "Confirmed non-empty trackDefaults");
+ assert_equals(sourceBuffer.trackDefaults, trackDefaults, "Round-tripped the non-empty TrackDefaultList object");
+ test.done();
+ }, "Test round-trip of non-empty SourceBuffer.trackDefaults");
+
+ sourceBufferTrackDefaultsTest(function(test, mediaElement, mediaSource, sourceBuffer)
+ {
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.trackDefaults = new TrackDefaultList([]); },
+ "Exception thrown when setting trackDefaults on SourceBuffer that is removed from MediaSource");
+ test.done();
+ }, "Test setting trackDefaults on an already-removed SourceBuffer");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ assert_array_equals(sourceBuffer.trackDefaults, [], "Empty initial SourceBuffer.trackDefaults");
+ test.expectEvent(sourceBuffer, "updateend", "Append ended");
+ sourceBuffer.appendBuffer(mediaData);
+ assert_true(sourceBuffer.updating, "SourceBuffer is updating");
+
+ assert_throws_dom("InvalidStateError",
+ function() { sourceBuffer.trackDefaults = new TrackDefaultList([]); },
+ "Exception thrown when setting trackDefaults on SourceBuffer that is updating");
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_false(sourceBuffer.updating, "SourceBuffer is not updating");
+ sourceBuffer.trackDefaults = new TrackDefaultList([]);
+ test.done();
+ });
+ }, "Test setting trackDefaults on a SourceBuffer that is updating");
+
+ sourceBufferTrackDefaultsTest(function(test, mediaElement, mediaSource, sourceBuffer)
+ {
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.trackDefaults = null; },
+ "null should be disallowed by trackDefaults setter");
+ test.done();
+ }, "Test setting null SourceBuffer.trackDefaults");
+</script>
diff --git a/testing/web-platform/tests/media-source/mediasource-sourcebufferlist.html b/testing/web-platform/tests/media-source/mediasource-sourcebufferlist.html
new file mode 100644
index 0000000000..760e6df46c
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-sourcebufferlist.html
@@ -0,0 +1,97 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>SourceBufferList test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ function verifySourceBufferLists(mediaSource, expected)
+ {
+ assert_equals(mediaSource.sourceBuffers.length, expected.length, "sourceBuffers length");
+ assert_equals(mediaSource.activeSourceBuffers.length, 0, "activeSourceBuffers length");
+ for (var i = 0; i < expected.length; ++i) {
+ assert_equals(mediaSource.sourceBuffers[i], expected[i], "Verifying mediaSource.sourceBuffers[" + i + "]");
+ }
+ assert_equals(mediaSource.sourceBuffers[expected.length], undefined,
+ "If index is greater than or equal to the length attribute then return undefined.");
+ }
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBufferA = mediaSource.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE);
+ verifySourceBufferLists(mediaSource, [sourceBufferA]);
+
+ var sourceBufferB = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+ verifySourceBufferLists(mediaSource, [sourceBufferA, sourceBufferB]);
+ test.done();
+ }, "Test SourceBufferList getter method");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ test.expectEvent(mediaSource.sourceBuffers, "addsourcebuffer", "sourceBuffers");
+ var sourceBufferA = mediaSource.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE);
+ var sourceBufferB = null;
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(mediaSource.sourceBuffers, "addsourcebuffer", "sourceBuffers");
+ sourceBufferB = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ verifySourceBufferLists(mediaSource, [sourceBufferA, sourceBufferB]);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(mediaSource.sourceBuffers, "removesourcebuffer", "sourceBuffers");
+ mediaSource.removeSourceBuffer(sourceBufferA);
+
+ verifySourceBufferLists(mediaSource, [sourceBufferB]);
+
+ test.expectEvent(mediaSource.sourceBuffers, "addsourcebuffer", "sourceBuffers");
+ sourceBufferA = mediaSource.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE);
+
+ verifySourceBufferLists(mediaSource, [sourceBufferB, sourceBufferA]);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ test.done();
+ });
+ }, "Test SourceBufferList event dispatching.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ test.expectEvent(mediaSource.sourceBuffers, "addsourcebuffer", "sourceBuffers");
+ test.expectEvent(mediaSource.sourceBuffers, "addsourcebuffer", "sourceBuffers");
+ var sourceBufferA = mediaSource.addSourceBuffer(MediaSourceUtil.VIDEO_ONLY_TYPE);
+ var sourceBufferB = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_ONLY_TYPE);
+
+ verifySourceBufferLists(mediaSource, [sourceBufferA, sourceBufferB]);
+
+ test.waitForExpectedEvents(function()
+ {
+ verifySourceBufferLists(mediaSource, [sourceBufferA, sourceBufferB]);
+
+ // Force the media element to close the MediaSource object.
+ test.expectEvent(mediaSource.sourceBuffers, "removesourcebuffer", "sourceBuffers");
+ test.expectEvent(mediaSource, "sourceclose", "mediaSource closing");
+ mediaElement.src = "";
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.readyState, "closed", "mediaSource is closed.");
+
+ verifySourceBufferLists(mediaSource, []);
+ test.done();
+ });
+ }, "Test that only 1 removesourcebuffer event fires on each SourceBufferList when the MediaSource closes.");
+
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-timestamp-offset.html b/testing/web-platform/tests/media-source/mediasource-timestamp-offset.html
new file mode 100644
index 0000000000..8381aceeb0
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-timestamp-offset.html
@@ -0,0 +1,125 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<html>
+ <head>
+ <title>SourceBuffer.timestampOffset test cases.</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="mediasource-util.js"></script>
+ </head>
+ <body>
+ <div id="log"></div>
+ <script>
+ function simpleTimestampOffsetTest(value, expected, description)
+ {
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var segmentInfo = MediaSourceUtil.SEGMENT_INFO;
+ var sourceBuffer = mediaSource.addSourceBuffer(segmentInfo.type);
+
+ assert_equals(sourceBuffer.timestampOffset, 0,
+ "Initial timestampOffset of a SourceBuffer is 0");
+
+ if (expected == "TypeError") {
+ assert_throws_js(TypeError,
+ function() { sourceBuffer.timestampOffset = value; },
+ "setting timestampOffset to " + description + " throws an exception.");
+ } else {
+ sourceBuffer.timestampOffset = value;
+ assert_equals(sourceBuffer.timestampOffset, expected);
+ }
+
+ test.done();
+ }, "Test setting SourceBuffer.timestampOffset to " + description + ".");
+ }
+
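+            // The expectations below follow WebIDL conversion for the IDL double type: assigned
+            // values go through ToNumber, so null, false and "" become 0, true becomes 1, and
+            // "10.5" becomes 10.5, while values that convert to a non-finite number (Infinity,
+            // NaN, and undefined, which converts to NaN) throw a TypeError.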
+ simpleTimestampOffsetTest(10.5, 10.5, "a positive number");
+ simpleTimestampOffsetTest(-10.4, -10.4, "a negative number");
+ simpleTimestampOffsetTest(0, 0, "zero");
+ simpleTimestampOffsetTest(Number.POSITIVE_INFINITY, "TypeError", "positive infinity");
+ simpleTimestampOffsetTest(Number.NEGATIVE_INFINITY, "TypeError", "negative infinity");
+ simpleTimestampOffsetTest(Number.NaN, "TypeError", "NaN");
+ simpleTimestampOffsetTest(undefined, "TypeError", "undefined");
+ simpleTimestampOffsetTest(null, 0, "null");
+ simpleTimestampOffsetTest(false, 0, "false");
+ simpleTimestampOffsetTest(true, 1, "true");
+ simpleTimestampOffsetTest("10.5", 10.5, "a number string");
+ simpleTimestampOffsetTest("", 0, "an empty string");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ sourceBuffer.appendBuffer(initSegment);
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ sourceBuffer.appendBuffer(mediaSegment);
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ mediaSource.endOfStream();
+
+ assert_equals(mediaSource.readyState, "ended");
+
+ mediaSource.sourceBuffers[0].timestampOffset = 2;
+
+ assert_equals(mediaSource.readyState, "open");
+
+ test.expectEvent(mediaSource, "sourceopen", "mediaSource fired 'sourceopen' event.");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ test.done();
+ });
+ }, "Test setting timestampOffset in 'ended' state causes a transition to 'open'.");
+
+ mediasource_testafterdataloaded(function(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData)
+ {
+ var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);
+ var mediaSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[0]);
+
+ test.expectEvent(sourceBuffer, "updateend", "initSegment append ended.");
+ sourceBuffer.appendBuffer(initSegment);
+ assert_equals(mediaSource.sourceBuffers[0].timestampOffset, 0, "read initial value");
+
+ test.waitForExpectedEvents(function()
+ {
+ test.expectEvent(sourceBuffer, "updateend", "mediaSegment append ended.");
+ sourceBuffer.appendBuffer(mediaSegment);
+ assert_equals(mediaSource.sourceBuffers[0].timestampOffset, 0,
+                              "No change to timestampOffset after segments mode init segment append");
+ });
+
+ test.waitForExpectedEvents(function()
+ {
+ assert_equals(mediaSource.sourceBuffers[0].timestampOffset, 0,
+                              "No change to timestampOffset after segments mode media segment append");
+ test.done();
+ });
+ }, "Test getting the initial value of timestampOffset.");
+
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var sourceBuffer = mediaSource.addSourceBuffer(MediaSourceUtil.AUDIO_VIDEO_TYPE);
+ assert_true(sourceBuffer != null, "New SourceBuffer returned");
+
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ assert_equals(mediaSource.sourceBuffers.length, 0, "MediaSource.sourceBuffers is empty");
+              assert_equals(mediaSource.activeSourceBuffers.length, 0, "MediaSource.activeSourceBuffers is empty");
+
+ assert_throws_dom("InvalidStateError", function()
+ {
+ sourceBuffer.timestampOffset = 10;
+ });
+
+ test.done();
+          }, "Test setting timestampOffset after removing the SourceBuffer.");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/media-source/mediasource-trackdefault.html b/testing/web-platform/tests/media-source/mediasource-trackdefault.html
new file mode 100644
index 0000000000..e6c9e76ef9
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-trackdefault.html
@@ -0,0 +1,101 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+ function checkConstructionSucceeds(type, language, label, kinds, byteStreamTrackID)
+ {
+ var trackDefault = new TrackDefault(type, language, label, kinds, byteStreamTrackID);
+ assert_equals(trackDefault.type, type, "type");
+ assert_equals(trackDefault.language, language, "language");
+ assert_equals(trackDefault.label, label, "label");
+ assert_equals(trackDefault.byteStreamTrackID, byteStreamTrackID, "byteStreamTrackID");
+ assert_array_equals(trackDefault.kinds, kinds, "kinds");
+ }
+
+ function checkConstructionFails(type, language, label, kinds, byteStreamTrackID)
+ {
+ assert_throws_js(TypeError,
+ function() { new TrackDefault(type, language, label, kinds, byteStreamTrackID); },
+ "TrackDefault construction threw an exception");
+ }
+
+ function trackDefaultConstructionTest(type, language, label, kinds, byteStreamTrackID, expectation, description)
+ {
+ test(function()
+ {
+ if (expectation)
+ checkConstructionSucceeds(type, language, label, kinds, byteStreamTrackID);
+ else
+ checkConstructionFails(type, language, label, kinds, byteStreamTrackID);
+ }, description + ": type '" + type + "', language '" + language + "', label '" + label + "', multiple kinds, byteStreamTrackID '" + byteStreamTrackID + "'");
+
+ // If all of |kinds| are expected to succeed, also test each kind individually.
+ if (!expectation || kinds.length <= 1)
+ return;
+ for (var i = 0; i < kinds.length; ++i) {
+ test(function()
+ {
+ checkConstructionSucceeds(type, language, label, new Array(kinds[i]), byteStreamTrackID);
+ }, description + ": type '" + type + "', language '" + language + "', label '" + label + "', kind '" + kinds[i] + "', byteStreamTrackID '" + byteStreamTrackID + "'");
+ }
+ }
+
+ var VALID_AUDIO_TRACK_KINDS = [
+ "alternative",
+ "descriptions",
+ "main",
+ "main-desc",
+ "translation",
+ "commentary",
+ "",
+ ];
+
+ var VALID_VIDEO_TRACK_KINDS = [
+ "alternative",
+ "captions",
+ "main",
+ "sign",
+ "subtitles",
+ "commentary",
+ "",
+ ];
+
+ var VALID_TEXT_TRACK_KINDS = [
+ "subtitles",
+ "captions",
+ "descriptions",
+ "chapters",
+ "metadata",
+ ];
+
+ trackDefaultConstructionTest("audio", "en-US", "audio label", VALID_AUDIO_TRACK_KINDS, "1", true, "Test valid audio kinds");
+
+ trackDefaultConstructionTest("video", "en-US", "video label", VALID_VIDEO_TRACK_KINDS, "1", true, "Test valid video kinds");
+
+ trackDefaultConstructionTest("text", "en-US", "text label", VALID_TEXT_TRACK_KINDS, "1", true, "Test valid text kinds");
+
+ trackDefaultConstructionTest("audio", "en-US", "audio label", VALID_VIDEO_TRACK_KINDS, "1", false, "Test mixed valid and invalid audio kinds");
+
+ trackDefaultConstructionTest("video", "en-US", "video label", VALID_AUDIO_TRACK_KINDS, "1", false, "Test mixed valid and invalid video kinds");
+
+ trackDefaultConstructionTest("text", "en-US", "text label", VALID_VIDEO_TRACK_KINDS, "1", false, "Test mixed valid and invalid text kinds");
+
+ trackDefaultConstructionTest("invalid type", "en-US", "label", VALID_AUDIO_TRACK_KINDS, "1", false, "Test invalid 'type' parameter type passed to TrackDefault constructor");
+
+ test(function()
+ {
+ checkConstructionFails("audio", "en-US", "label", "this is not a valid sequence", "1");
+ }, "Test invalid 'kinds' parameter type passed to TrackDefault constructor");
+
+ test(function()
+ {
+ var trackDefault = new TrackDefault("audio", "en-US", "label", VALID_AUDIO_TRACK_KINDS, "1");
+ var kinds = trackDefault.kinds;
+ kinds[0] = "invalid kind";
+ assert_equals(kinds[0], "invalid kind", "local kinds is updated");
+ assert_equals(VALID_AUDIO_TRACK_KINDS[0], "alternative", "local original kinds unchanged");
+ assert_array_equals(trackDefault.kinds, VALID_AUDIO_TRACK_KINDS, "trackDefault kinds unchanged");
+ }, "Test updating the retval of TrackDefault.kinds does not modify TrackDefault.kinds");
+</script>
diff --git a/testing/web-platform/tests/media-source/mediasource-trackdefaultlist.html b/testing/web-platform/tests/media-source/mediasource-trackdefaultlist.html
new file mode 100644
index 0000000000..940260cfd2
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-trackdefaultlist.html
@@ -0,0 +1,60 @@
+<!DOCTYPE html>
+<!-- Copyright © 2016 Chromium authors and World Wide Web Consortium, (Massachusetts Institute of Technology, ERCIM, Keio University, Beihang). -->
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+ test(function()
+ {
+ var originalTrackDefaults = [
+ // Same everything, but different byteStreamTrackID, should be allowed by the constructor.
+ new TrackDefault("audio", "en-US", "label", ["main"], ""),
+ new TrackDefault("audio", "en-US", "label", ["main"], "1"),
+ new TrackDefault("audio", "en-US", "label", ["main"], "2"),
+ new TrackDefault("audio", "en-US", "label", [""], "3"),
+
+ // Same everything, but different type, should be allowed by the constructor.
+ new TrackDefault("video", "en-US", "label", ["main"], ""),
+ new TrackDefault("video", "en-US", "label", ["main"], "1"),
+ new TrackDefault("video", "en-US", "label", ["main"], "2"),
+ new TrackDefault("video", "en-US", "label", [""], "3")
+ ];
+
+ // Get a new array containing the same objects as |originalTrackDefaults|.
+ var trackDefaults = originalTrackDefaults.slice();
+
+ var trackDefaultList = new TrackDefaultList(trackDefaults);
+ assert_array_equals(trackDefaultList, originalTrackDefaults, "construction and read-back succeeded");
+ assert_equals(trackDefaultList[trackDefaultList.length], undefined, "out of range indexed property getter result is undefined");
+ assert_equals(trackDefaultList[trackDefaultList.length + 1], undefined, "out of range indexed property getter result is undefined");
+
+ // Introduce a same-type, same-empty-string-byteStreamTrackID conflict between trackDefaults[0] and trackDefaults[4].
+ trackDefaults[4] = new TrackDefault("audio", "en-US", "label", ["main"], "");
+ assert_equals(trackDefaults[0].type, trackDefaults[4].type, "same-type conflict setup");
+ assert_equals(trackDefaults[0].byteStreamTrackID, trackDefaults[4].byteStreamTrackID, "same-byteStreamTrackID conflict setup");
+ assert_throws_dom("InvalidAccessError",
+ function() { new TrackDefaultList(trackDefaults); },
+ "TrackDefaultList construction should throw exception due to same type and byteStreamTrackID across at least 2 items in trackDefaults");
+
+ // Introduce a same-type, same-non-empty-string-byteStreamTrackID conflict between trackDefaults[4] and trackDefaults[5].
+ trackDefaults[4] = new TrackDefault("video", "en-US", "label", ["main"], "1");
+ assert_equals(trackDefaults[4].type, trackDefaults[5].type, "same-type conflict setup");
+ assert_equals(trackDefaults[4].byteStreamTrackID, trackDefaults[5].byteStreamTrackID, "same-byteStreamTrackID conflict setup");
+ assert_throws_dom("InvalidAccessError",
+ function() { new TrackDefaultList(trackDefaults); },
+ "TrackDefaultList construction should throw exception due to same type and byteStreamTrackID across at least 2 items in trackDefaults");
+
+ // Confirm the constructed TrackDefaultList makes a shallow copy of the supplied TrackDefault sequence.
+ assert_array_equals(trackDefaultList, originalTrackDefaults, "read-back of original trackDefaultList unchanged");
+
+ }, "Test track default list construction, length, and indexed property getter");
+
+ test(function()
+ {
+ var trackDefaultList = new TrackDefaultList();
+ assert_array_equals(trackDefaultList, [], "empty list constructable without supplying optional trackDefaults parameter");
+
+ trackDefaultList = new TrackDefaultList([]);
+ assert_array_equals(trackDefaultList, [], "empty list constructable by supplying empty sequence as optional trackDefaults parameter");
+ }, "Test empty track default list construction with and without optional trackDefaults parameter");
+</script>
diff --git a/testing/web-platform/tests/media-source/mediasource-util.js b/testing/web-platform/tests/media-source/mediasource-util.js
new file mode 100644
index 0000000000..6b11210052
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mediasource-util.js
@@ -0,0 +1,412 @@
+(function(window) {
+ var SEGMENT_INFO_LIST = [
+ {
+ url: 'mp4/test.mp4',
+ type: 'video/mp4; codecs="mp4a.40.2,avc1.4d400d"',
+ duration: 6.549,
+ init: { offset: 0, size: 1413 },
+ media: [
+ { offset: 1413, size: 24034, timev: 0.095000, timea: 0, endtimev: 0.896666, endtimea: 0.882358 },
+ { offset: 25447, size: 21757, timev: 0.896666, timea: 0.882358, endtimev: 1.696666, endtimea: 1.671836 },
+ { offset: 47204, size: 23591, timev: 1.696666, timea: 1.671836, endtimev: 2.498333, endtimea: 2.461315 },
+ { offset: 70795, size: 22614, timev: 2.498333, timea: 2.461315, endtimev: 3.298333, endtimea: 3.297233 },
+ { offset: 93409, size: 18353, timev: 3.298333, timea: 3.297233, endtimev: 4.100000, endtimea: 4.086712},
+ { offset: 111762, size: 23935, timev: 4.100000, timea: 4.086712, endtimev: 4.900000, endtimea: 4.876190 },
+ { offset: 135697, size: 21911, timev: 4.900000, timea: 4.876190, endtimev: 5.701666, endtimea: 5.665668 },
+ { offset: 157608, size: 23776, timev: 5.701666, timea: 5.665668, endtimev: 6.501666, endtimea: 6.501587 },
+ { offset: 181384, size: 5843, timev: 6.501666, timea: 6.501587, endtimev: 6.501666, endtimea: 6.501678 },
+ ]
+ },
+ {
+ url: 'webm/test.webm',
+ type: 'video/webm; codecs="vp8, vorbis"',
+ duration: 6.552,
+ init: { offset: 0, size: 4116 },
+ media: [
+ { offset: 4116, size: 26583, timev: 0.112000, timea: 0, endtimev: 0.913000, endtimea: 0.912000 },
+ { offset: 30699, size: 20555, timev: 0.913000, timea: 0.912000, endtimev: 1.714000, endtimea: 1.701000 },
+ { offset: 51254, size: 22668, timev: 1.714000, timea: 1.701000, endtimev: 2.515000, endtimea: 2.514000 },
+ { offset: 73922, size: 21943, timev: 2.515000, timea: 2.514000, endtimev: 3.315000, endtimea: 3.303000 },
+ { offset: 95865, size: 23015, timev: 3.315000, timea: 3.303000, endtimev: 4.116000, endtimea: 4.093000},
+ { offset: 118880, size: 20406, timev: 4.116000, timea: 4.093000, endtimev: 4.917000, endtimea: 4.906000 },
+ { offset: 139286, size: 21537, timev: 4.917000, timea: 4.906000, endtimev: 5.718000, endtimea: 5.695000 },
+ { offset: 160823, size: 24027, timev: 5.718000, timea: 5.695000, endtimev: 6.519000, endtimea: 6.508000 },
+ { offset: 184850, size: 5955, timev: 6.519000, timea: 6.508000, endtimev: 6.577000, endtimea: 6.577000},
+ ],
+ }
+ ];
+ var EventExpectationsManager = function(test)
+ {
+ this.test_ = test;
+ this.eventTargetList_ = [];
+ this.waitCallbacks_ = [];
+ };
+
+ EventExpectationsManager.prototype.expectEvent = function(object, eventName, description)
+ {
+ var eventInfo = { 'target': object, 'type': eventName, 'description': description};
+ var expectations = this.getExpectations_(object);
+ expectations.push(eventInfo);
+
+ var t = this;
+ var waitHandler = this.test_.step_func(this.handleWaitCallback_.bind(this));
+ var eventHandler = this.test_.step_func(function(event)
+ {
+ object.removeEventListener(eventName, eventHandler);
+ var expected = expectations[0];
+ assert_equals(event.target, expected.target, "Event target match.");
+ assert_equals(event.type, expected.type, "Event types match.");
+ assert_equals(eventInfo.description, expected.description, "Descriptions match for '" + event.type + "'.");
+
+ expectations.shift(1);
+ if (t.waitCallbacks_.length > 1)
+ setTimeout(waitHandler, 0);
+ else if (t.waitCallbacks_.length == 1) {
+ // Immediately call the callback.
+ waitHandler();
+ }
+ });
+ object.addEventListener(eventName, eventHandler);
+ };
+
+ EventExpectationsManager.prototype.waitForExpectedEvents = function(callback)
+ {
+ this.waitCallbacks_.push(callback);
+ setTimeout(this.test_.step_func(this.handleWaitCallback_.bind(this)), 0);
+ };
+
+ EventExpectationsManager.prototype.expectingEvents = function()
+ {
+ for (var i = 0; i < this.eventTargetList_.length; ++i) {
+ if (this.eventTargetList_[i].expectations.length > 0) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ EventExpectationsManager.prototype.handleWaitCallback_ = function()
+ {
+ if (this.waitCallbacks_.length == 0 || this.expectingEvents())
+ return;
+ var callback = this.waitCallbacks_.shift(1);
+ callback();
+ };
+
+ EventExpectationsManager.prototype.getExpectations_ = function(target)
+ {
+ for (var i = 0; i < this.eventTargetList_.length; ++i) {
+ var info = this.eventTargetList_[i];
+ if (info.target == target) {
+ return info.expectations;
+ }
+ }
+ var expectations = [];
+ this.eventTargetList_.push({ 'target': target, 'expectations': expectations });
+ return expectations;
+ };
+
+ function loadData_(test, url, callback, isBinary)
+ {
+ var request = new XMLHttpRequest();
+ request.open("GET", url, true);
+ if (isBinary) {
+ request.responseType = 'arraybuffer';
+ }
+ request.onload = test.step_func(function(event)
+ {
+ if (request.status != 200) {
+ assert_unreached("Unexpected status code : " + request.status);
+ return;
+ }
+ var response = request.response;
+ if (isBinary) {
+ response = new Uint8Array(response);
+ }
+ callback(response);
+ });
+ request.onerror = test.step_func(function(event)
+ {
+ assert_unreached("Unexpected error");
+ });
+ request.send();
+ }
+
+ function openMediaSource_(test, mediaTag, callback)
+ {
+ var mediaSource = new MediaSource();
+ var mediaSourceURL = URL.createObjectURL(mediaSource);
+
+ var eventHandler = test.step_func(onSourceOpen);
+ function onSourceOpen(event)
+ {
+ mediaSource.removeEventListener('sourceopen', eventHandler);
+ URL.revokeObjectURL(mediaSourceURL);
+ callback(mediaSource);
+ }
+
+ mediaSource.addEventListener('sourceopen', eventHandler);
+ mediaTag.src = mediaSourceURL;
+ }
+
+ var MediaSourceUtil = {};
+
+ MediaSourceUtil.loadTextData = function(test, url, callback)
+ {
+ loadData_(test, url, callback, false);
+ };
+
+ MediaSourceUtil.loadBinaryData = function(test, url, callback)
+ {
+ loadData_(test, url, callback, true);
+ };
+
+ MediaSourceUtil.fetchManifestAndData = function(test, manifestFilename, callback)
+ {
+ var baseURL = '';
+ var manifestURL = baseURL + manifestFilename;
+ MediaSourceUtil.loadTextData(test, manifestURL, function(manifestText)
+ {
+ var manifest = JSON.parse(manifestText);
+
+ assert_true(MediaSource.isTypeSupported(manifest.type), manifest.type + " is supported.");
+
+ var mediaURL = baseURL + manifest.url;
+ MediaSourceUtil.loadBinaryData(test, mediaURL, function(mediaData)
+ {
+ callback(manifest.type, mediaData);
+ });
+ });
+ };
+
+ MediaSourceUtil.extractSegmentData = function(mediaData, info)
+ {
+ var start = info.offset;
+ var end = start + info.size;
+ return mediaData.subarray(start, end);
+ }
+
+ MediaSourceUtil.getMediaDataForPlaybackTime = function(mediaData, segmentInfo, playbackTimeToAdd)
+ {
+ assert_less_than_equal(playbackTimeToAdd, segmentInfo.duration);
+ var mediaInfo = segmentInfo.media;
+ var start = mediaInfo[0].offset;
+ var numBytes = 0;
+ var segmentIndex = 0;
+ while (segmentIndex < mediaInfo.length
+ && Math.min(mediaInfo[segmentIndex].timev, mediaInfo[segmentIndex].timea) <= playbackTimeToAdd)
+ {
+ numBytes += mediaInfo[segmentIndex].size;
+ ++segmentIndex;
+ }
+ return mediaData.subarray(start, numBytes + start);
+ }
+
+ function getFirstSupportedType(typeList)
+ {
+ for (var i = 0; i < typeList.length; ++i) {
+ if (window.MediaSource && MediaSource.isTypeSupported(typeList[i]))
+ return typeList[i];
+ }
+ return "";
+ }
+
+ function getSegmentInfo()
+ {
+ for (var i = 0; i < SEGMENT_INFO_LIST.length; ++i) {
+ var segmentInfo = SEGMENT_INFO_LIST[i];
+ if (window.MediaSource && MediaSource.isTypeSupported(segmentInfo.type)) {
+ return segmentInfo;
+ }
+ }
+ return null;
+ }
+
+ // To support mediasource-changetype tests, do not use any types that
+ // indicate automatic timestamp generation in this audioOnlyTypes list.
+ var audioOnlyTypes = ['audio/mp4;codecs="mp4a.40.2"', 'audio/webm;codecs="vorbis"'];
+
+ var videoOnlyTypes = ['video/mp4;codecs="avc1.4D4001"', 'video/webm;codecs="vp8"'];
+ var audioVideoTypes = ['video/mp4;codecs="avc1.4D4001,mp4a.40.2"', 'video/webm;codecs="vp8,vorbis"'];
+ MediaSourceUtil.AUDIO_ONLY_TYPE = getFirstSupportedType(audioOnlyTypes);
+ MediaSourceUtil.VIDEO_ONLY_TYPE = getFirstSupportedType(videoOnlyTypes);
+ MediaSourceUtil.AUDIO_VIDEO_TYPE = getFirstSupportedType(audioVideoTypes);
+ MediaSourceUtil.SEGMENT_INFO = getSegmentInfo();
+
+ MediaSourceUtil.getSubType = function(mimetype) {
+ var slashIndex = mimetype.indexOf("/");
+ var semicolonIndex = mimetype.indexOf(";");
+ if (slashIndex <= 0) {
+ assert_unreached("Invalid mimetype '" + mimetype + "'");
+ return;
+ }
+
+ var start = slashIndex + 1;
+ if (semicolonIndex >= 0) {
+ if (semicolonIndex <= start) {
+ assert_unreached("Invalid mimetype '" + mimetype + "'");
+ return;
+ }
+
+ return mimetype.substr(start, semicolonIndex - start)
+ }
+
+ return mimetype.substr(start);
+ };
+
+ MediaSourceUtil.append = function(test, sourceBuffer, data, callback)
+ {
+ function onUpdate() {
+ sourceBuffer.removeEventListener("update", onUpdate);
+ callback();
+ }
+ sourceBuffer.addEventListener("update", onUpdate);
+
+ sourceBuffer.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ sourceBuffer.appendBuffer(data);
+ };
+
+ MediaSourceUtil.appendUntilEventFires = function(test, mediaElement, eventName, sourceBuffer, mediaData, segmentInfo, startingIndex)
+ {
+ var eventFired = false;
+ function onEvent() {
+ mediaElement.removeEventListener(eventName, onEvent);
+ eventFired = true;
+ }
+ mediaElement.addEventListener(eventName, onEvent);
+
+ var i = startingIndex;
+ var onAppendDone = function() {
+ if (eventFired || (i >= (segmentInfo.media.length - 1)))
+ return;
+
+ i++;
+ if (i < segmentInfo.media.length)
+ {
+ MediaSourceUtil.append(test, sourceBuffer, MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[i]), onAppendDone);
+ }
+ };
+ MediaSourceUtil.append(test, sourceBuffer, MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.media[i]), onAppendDone);
+
+ };
+
+ function addExtraTestMethods(test)
+ {
+ test.eventExpectations_ = new EventExpectationsManager(test);
+ test.expectEvent = function(object, eventName, description)
+ {
+ test.eventExpectations_.expectEvent(object, eventName, description);
+ };
+
+ test.waitForExpectedEvents = function(callback)
+ {
+ test.eventExpectations_.waitForExpectedEvents(callback);
+ };
+
+ test.waitForCurrentTimeChange = function(mediaElement, callback)
+ {
+ var initialTime = mediaElement.currentTime;
+
+ var onTimeUpdate = test.step_func(function()
+ {
+ if (mediaElement.currentTime != initialTime) {
+ mediaElement.removeEventListener('timeupdate', onTimeUpdate);
+ callback();
+ }
+ });
+
+ mediaElement.addEventListener('timeupdate', onTimeUpdate);
+ }
+
+ var oldTestDone = test.done.bind(test);
+ test.done = function()
+ {
+ if (test.status == test.PASS) {
+ test.step(function() {
+ assert_false(test.eventExpectations_.expectingEvents(), "No pending event expectations.");
+ });
+ }
+ oldTestDone();
+ };
+ };
+
+ window['MediaSourceUtil'] = MediaSourceUtil;
+ window['media_test'] = function(testFunction, description, options)
+ {
+ options = options || {};
+ return async_test(function(test)
+ {
+ addExtraTestMethods(test);
+ testFunction(test);
+ }, description, options);
+ };
+ window['mediasource_test'] = function(testFunction, description, options)
+ {
+ return media_test(function(test)
+ {
+ var mediaTag = document.createElement("video");
+ if (!document.body) {
+ document.body = document.createElement("body");
+ }
+ document.body.appendChild(mediaTag);
+
+ test.removeMediaElement_ = true;
+ test.add_cleanup(function()
+ {
+ if (test.removeMediaElement_) {
+ document.body.removeChild(mediaTag);
+ test.removeMediaElement_ = false;
+ }
+ });
+
+ openMediaSource_(test, mediaTag, function(mediaSource)
+ {
+ testFunction(test, mediaTag, mediaSource);
+ });
+ }, description, options);
+ };
+
+ window['mediasource_testafterdataloaded'] = function(testFunction, description, options)
+ {
+ mediasource_test(function(test, mediaElement, mediaSource)
+ {
+ var segmentInfo = MediaSourceUtil.SEGMENT_INFO;
+
+ if (!segmentInfo) {
+ assert_unreached("No segment info compatible with this MediaSource implementation.");
+ return;
+ }
+
+ mediaElement.addEventListener('error', test.unreached_func("Unexpected event 'error'"));
+
+ var sourceBuffer = mediaSource.addSourceBuffer(segmentInfo.type);
+ MediaSourceUtil.loadBinaryData(test, segmentInfo.url, function(mediaData)
+ {
+ testFunction(test, mediaElement, mediaSource, segmentInfo, sourceBuffer, mediaData);
+ });
+ }, description, options);
+ }
+
+ function timeRangesToString(ranges)
+ {
+ var s = "{";
+ for (var i = 0; i < ranges.length; ++i) {
+ s += " [" + ranges.start(i).toFixed(3) + ", " + ranges.end(i).toFixed(3) + ")";
+ }
+ return s + " }";
+ }
+
+ window['assertBufferedEquals'] = function(obj, expected, description)
+ {
+ var actual = timeRangesToString(obj.buffered);
+ assert_equals(actual, expected, description);
+ };
+
+ window['assertSeekableEquals'] = function(obj, expected, description)
+ {
+ var actual = timeRangesToString(obj.seekable);
+ assert_equals(actual, expected, description);
+ };
+
+})(window);
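For orientation, the following is a minimal sketch (not part of the patch) of how a test file typically consumes the helpers defined in mediasource-util.js above, assuming the usual testharness.js, testharnessreport.js and mediasource-util.js script includes. The helper names (mediasource_testafterdataloaded, test.expectEvent, test.waitForExpectedEvents, MediaSourceUtil.extractSegmentData) come from the file itself; the test body and description are illustrative only.

    // Sketch only: append the init segment of the first supported test stream
    // and wait for the queued "updateend" expectation before finishing.
    mediasource_testafterdataloaded(function(test, mediaElement, mediaSource,
                                             segmentInfo, sourceBuffer, mediaData)
    {
        var initSegment = MediaSourceUtil.extractSegmentData(mediaData, segmentInfo.init);

        // Queue the expectation before triggering the append, then wait for it.
        test.expectEvent(sourceBuffer, "updateend", "Init segment append ended.");
        sourceBuffer.appendBuffer(initSegment);

        test.waitForExpectedEvents(function()
        {
            assert_false(sourceBuffer.updating, "Append completed.");
            test.done();
        });
    }, "Sketch: append an init segment via the shared helpers.");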
diff --git a/testing/web-platform/tests/media-source/mp3/sound_5.mp3 b/testing/web-platform/tests/media-source/mp3/sound_5.mp3
new file mode 100644
index 0000000000..bd20291989
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp3/sound_5.mp3
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/h264-starvation-init.mp4 b/testing/web-platform/tests/media-source/mp4/h264-starvation-init.mp4
new file mode 100644
index 0000000000..3724acd8ed
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/h264-starvation-init.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/h264-starvation-media.mp4 b/testing/web-platform/tests/media-source/mp4/h264-starvation-media.mp4
new file mode 100644
index 0000000000..8cc5c5e823
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/h264-starvation-media.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/invalid-codec.mp4 b/testing/web-platform/tests/media-source/mp4/invalid-codec.mp4
new file mode 100644
index 0000000000..6fcc7c21a6
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/invalid-codec.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-a-128k-44100Hz-1ch-manifest.json b/testing/web-platform/tests/media-source/mp4/test-a-128k-44100Hz-1ch-manifest.json
new file mode 100644
index 0000000000..f3caa460e9
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-a-128k-44100Hz-1ch-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-a-128k-44100Hz-1ch.mp4",
+ "type": "audio/mp4;codecs=\"mp4a.40.2\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-a-128k-44100Hz-1ch.mp4 b/testing/web-platform/tests/media-source/mp4/test-a-128k-44100Hz-1ch.mp4
new file mode 100644
index 0000000000..fc7832a5b3
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-a-128k-44100Hz-1ch.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-a-192k-44100Hz-1ch-manifest.json b/testing/web-platform/tests/media-source/mp4/test-a-192k-44100Hz-1ch-manifest.json
new file mode 100644
index 0000000000..41a6f339b7
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-a-192k-44100Hz-1ch-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-a-192k-44100Hz-1ch.mp4",
+ "type": "audio/mp4;codecs=\"mp4a.40.2\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-a-192k-44100Hz-1ch.mp4 b/testing/web-platform/tests/media-source/mp4/test-a-192k-44100Hz-1ch.mp4
new file mode 100644
index 0000000000..864a87d25b
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-a-192k-44100Hz-1ch.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..7731e3170e
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.mp4",
+ "type": "video/mp4;codecs=\"avc1.4D4001,mp4a.40.2\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.mp4 b/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.mp4
new file mode 100644
index 0000000000..e623e8ee4c
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..78ded611f6
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.mp4",
+ "type": "video/mp4;codecs=\"avc1.4D4001,mp4a.40.2\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.mp4 b/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.mp4
new file mode 100644
index 0000000000..946167b56e
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..ba46349f93
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.mp4",
+ "type": "video/mp4;codecs=\"avc1.4D4001,mp4a.40.2\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.mp4 b/testing/web-platform/tests/media-source/mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.mp4
new file mode 100644
index 0000000000..ace4bee53a
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..24da9b4ce3
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.mp4",
+ "type": "video/mp4;codecs=\"avc1.4D4001,mp4a.40.2\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.mp4 b/testing/web-platform/tests/media-source/mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.mp4
new file mode 100644
index 0000000000..f224a5426a
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-boxes-audio.mp4 b/testing/web-platform/tests/media-source/mp4/test-boxes-audio.mp4
new file mode 100644
index 0000000000..b1cabbfd21
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-boxes-audio.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-boxes-video.mp4 b/testing/web-platform/tests/media-source/mp4/test-boxes-video.mp4
new file mode 100644
index 0000000000..714c17ca12
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-boxes-video.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-24fps-8kfr-manifest.json b/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-24fps-8kfr-manifest.json
new file mode 100644
index 0000000000..a31b6d0245
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-24fps-8kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-v-128k-320x240-24fps-8kfr.mp4",
+ "type": "video/mp4;codecs=\"avc1.4D4001\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-24fps-8kfr.mp4 b/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-24fps-8kfr.mp4
new file mode 100644
index 0000000000..cc55f40fa2
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-24fps-8kfr.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..3e02844105
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-v-128k-320x240-30fps-10kfr.mp4",
+ "type": "video/mp4;codecs=\"avc1.4D4001\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-30fps-10kfr.mp4 b/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-30fps-10kfr.mp4
new file mode 100644
index 0000000000..68d02cdfec
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-v-128k-320x240-30fps-10kfr.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-v-128k-640x480-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/mp4/test-v-128k-640x480-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..10c4f4bcbd
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-v-128k-640x480-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-v-128k-640x480-30fps-10kfr.mp4",
+ "type": "video/mp4;codecs=\"avc1.4D4001\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-v-128k-640x480-30fps-10kfr.mp4 b/testing/web-platform/tests/media-source/mp4/test-v-128k-640x480-30fps-10kfr.mp4
new file mode 100644
index 0000000000..c4f47f0358
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-v-128k-640x480-30fps-10kfr.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test-v-256k-320x240-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/mp4/test-v-256k-320x240-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..42d3e1e524
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-v-256k-320x240-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "mp4/test-v-256k-320x240-30fps-10kfr.mp4",
+ "type": "video/mp4;codecs=\"avc1.4D4001\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/mp4/test-v-256k-320x240-30fps-10kfr.mp4 b/testing/web-platform/tests/media-source/mp4/test-v-256k-320x240-30fps-10kfr.mp4
new file mode 100644
index 0000000000..6dc4972fd7
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test-v-256k-320x240-30fps-10kfr.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mp4/test.mp4 b/testing/web-platform/tests/media-source/mp4/test.mp4
new file mode 100644
index 0000000000..1b0e7b56a6
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mp4/test.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/mediasource-webcodecs-addsourcebuffer.html b/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/mediasource-webcodecs-addsourcebuffer.html
new file mode 100644
index 0000000000..cc9cdc2b50
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/mediasource-webcodecs-addsourcebuffer.html
@@ -0,0 +1,203 @@
+<!DOCTYPE html>
+<html>
+ <title>Test MediaSource addSourceBuffer overloads for WebCodecs Audio/VideoDecoderConfigs</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+setup(() => {
+ assert_implements(
+ SourceBuffer.prototype.hasOwnProperty('appendEncodedChunks'),
+ 'SourceBuffer prototype hasOwnProperty "appendEncodedChunks", used ' +
+ 'here to feature detect MSE-for-WebCodecs implementation.');
+});
+
+testInvalidArguments();
+testValidArguments();
+
+function getValidAudioConfig() {
+ // TODO(crbug.com/1144908): Consider confirming with WebCodecs'
+ // isConfigSupported() once that API is available.
+ return {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 2
+ };
+}
+
+function getValidVideoConfig() {
+ // TODO(crbug.com/1144908): Consider confirming with WebCodecs'
+ // isConfigSupported() once that API is available.
+ return { codec: 'vp09.00.10.08' };
+}
+
+function testInvalidArguments() {
+ const INVALID_CASES = [
+ { arg: null,
+ name: 'null' },
+ { arg: undefined,
+ name: 'undefined' },
+ { arg: { },
+ name: '{ empty dictionary }' },
+ {
+ arg: {
+ audioConfig: getValidAudioConfig(),
+ videoConfig: getValidVideoConfig()
+ },
+ name: '{ valid audioConfig and videoConfig }',
+ },
+ {
+ arg: {
+ audioConfig: {
+ codec: 'bogus',
+ sampleRate: 48000,
+ numberOfChannels: 2
+ }
+ },
+ name: 'bad audio config codec',
+ },
+ { arg: { videoConfig: { codec: 'bogus' } },
+ name: 'bad video config codec' },
+ { arg: { audioConfig: { sampleRate: 48000, numberOfChannels: 2 } },
+ name: 'audio config missing required member "codec"' },
+ { arg: { videoConfig: { } },
+ name: 'video config missing required member "codec"' },
+ ];
+
+ [ 'closed', 'open', 'ended' ].forEach(readyStateScenario => {
+ INVALID_CASES.forEach(invalidCase => {
+ runAddSourceBufferTest(invalidCase['arg'] /* argument */,
+ false /* isValidArgument */,
+ invalidCase['name'] /* argumentDescription */,
+ readyStateScenario);
+ });
+ });
+}
+
+function testValidArguments() {
+ const VALID_CASES = [
+ {
+ arg: {
+ audioConfig: getValidAudioConfig()
+ },
+ name: 'valid audioConfig'
+ },
+ {
+ arg: {
+ videoConfig: getValidVideoConfig()
+ },
+ name: 'valid videoConfig'
+ },
+ ];
+
+ [ 'closed', 'open', 'ended' ].forEach(readyStateScenario => {
+ VALID_CASES.forEach(validCase => {
+ runAddSourceBufferTest(
+ validCase['arg'] /* argument */,
+ true /* isValidArgument */,
+ validCase['name'] /* argumentDescription */,
+ readyStateScenario);
+ });
+ });
+}
+
+async function getClosedMediaSource(test) {
+ let mediaSource = new MediaSource();
+ assert_equals(mediaSource.readyState, 'closed');
+ return mediaSource;
+}
+
+async function getOpenMediaSource(test) {
+ return new Promise(async resolve => {
+ const v = document.createElement('video');
+ const mediaSource = new MediaSource();
+ const url = URL.createObjectURL(mediaSource);
+ mediaSource.addEventListener('sourceopen', test.step_func(() => {
+ URL.revokeObjectURL(url);
+ assert_equals(mediaSource.readyState, 'open', 'MediaSource is open');
+ resolve(mediaSource);
+ }), { once: true });
+ v.src = url;
+ });
+}
+
+async function getEndedMediaSource(test) {
+ let mediaSource = await getOpenMediaSource(test);
+ assert_equals(mediaSource.readyState, 'open', 'MediaSource is open');
+ mediaSource.endOfStream();
+ assert_equals(mediaSource.readyState, 'ended', 'MediaSource is ended');
+ return mediaSource;
+}
+
+function runAddSourceBufferTest(argument, isValidArgument, argumentDescription, readyStateScenario) {
+ const testDescription = 'addSourceBuffer call with ' +
+ (isValidArgument ? 'valid' : 'invalid') +
+ ' argument ' + argumentDescription + ' while MediaSource readyState is ' +
+ readyStateScenario;
+
+ switch(readyStateScenario) {
+ case 'closed':
+ promise_test(async t => {
+ let mediaSource = await getClosedMediaSource(t);
+ assert_equals(mediaSource.readyState, 'closed');
+ let sourceBuffer;
+ if (isValidArgument) {
+ assert_throws_dom('InvalidStateError',
+ () => { sourceBuffer = mediaSource.addSourceBuffer(argument); },
+ 'addSourceBuffer(valid config) throws InvalidStateError if MediaSource is "closed"');
+ assert_equals(sourceBuffer, undefined,
+ 'addSourceBuffer result for valid config while "closed" should be exception');
+ } else {
+ assert_throws_js(TypeError,
+ () => { sourceBuffer = mediaSource.addSourceBuffer(argument); },
+ 'addSourceBuffer(invalid config) throws TypeError if MediaSource is "closed"');
+ assert_equals(sourceBuffer, undefined,
+ 'addSourceBuffer result for invalid config while "closed" should be exception');
+ }
+ }, testDescription);
+ break;
+ case 'open':
+ promise_test(async t => {
+ let mediaSource = await getOpenMediaSource(t);
+ assert_equals(mediaSource.readyState, 'open', 'MediaSource is open');
+ let sourceBuffer;
+ if (isValidArgument) {
+ sourceBuffer = mediaSource.addSourceBuffer(argument);
+ assert_true(sourceBuffer instanceof SourceBuffer,
+ 'addSourceBuffer result for valid config while "open" should be a SourceBuffer instance');
+ } else {
+ assert_throws_js(TypeError,
+ () => { sourceBuffer = mediaSource.addSourceBuffer(argument); },
+ 'addSourceBuffer(invalid config) throws TypeError if MediaSource is "open"');
+ assert_equals(sourceBuffer, undefined,
+ 'addSourceBuffer result for invalid config while "open" should be exception');
+ }
+ }, testDescription);
+ break;
+ case 'ended':
+ promise_test(async t => {
+ let mediaSource = await getEndedMediaSource(t);
+ let sourceBuffer;
+ if (isValidArgument) {
+ assert_throws_dom('InvalidStateError',
+ () => { sourceBuffer = mediaSource.addSourceBuffer(argument); },
+ 'addSourceBuffer(valid config) throws InvalidStateError if MediaSource is "ended"');
+ assert_equals(sourceBuffer, undefined,
+ 'addSourceBuffer result for valid config while "ended" should be exception');
+ } else {
+ assert_throws_js(TypeError,
+ () => { sourceBuffer = mediaSource.addSourceBuffer(argument); },
+ 'addSourceBuffer(invalid config) throws TypeError if MediaSource is "ended"');
+ assert_equals(sourceBuffer, undefined,
+ 'addSourceBuffer result for invalid config while "ended" should be exception');
+ }
+ }, testDescription);
+ break;
+ default:
+ assert_unreached('Invalid readyStateScenario ' + readyStateScenario);
+ break;
+ }
+}
+
+</script>
+</html>
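For readers unfamiliar with the tentative MSE-for-WebCodecs overload exercised above, here is a minimal sketch of the call shapes, assuming an implementation that passes the feature detection used in the test (SourceBuffer.prototype.hasOwnProperty('appendEncodedChunks')). The config values mirror getValidAudioConfig()/getValidVideoConfig() from the test; this is a sketch under those assumptions, not a definitive API reference.

    // Sketch only: the overload accepts either an audioConfig or a videoConfig
    // (supplying both in one call is rejected, per the test above), and only
    // while the MediaSource readyState is "open".
    const video = document.createElement('video');
    const mediaSource = new MediaSource();
    mediaSource.addEventListener('sourceopen', () => {
      const audioSourceBuffer = mediaSource.addSourceBuffer(
          { audioConfig: { codec: 'opus', sampleRate: 48000, numberOfChannels: 2 } });
      const videoSourceBuffer = mediaSource.addSourceBuffer(
          { videoConfig: { codec: 'vp09.00.10.08' } });
    }, { once: true });
    video.src = URL.createObjectURL(mediaSource);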
diff --git a/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/mediasource-webcodecs-appendencodedchunks-play.html b/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/mediasource-webcodecs-appendencodedchunks-play.html
new file mode 100644
index 0000000000..4df317a537
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/mediasource-webcodecs-appendencodedchunks-play.html
@@ -0,0 +1,83 @@
+<!DOCTYPE html>
+<html>
+ <title>Test basic encoded chunk buffering and playback with MediaSource</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+setup(() => {
+ assert_implements(
+ SourceBuffer.prototype.hasOwnProperty('appendEncodedChunks'),
+ 'SourceBuffer prototype hasOwnProperty "appendEncodedChunks", used ' +
+ 'here to feature detect MSE-for-WebCodecs implementation.');
+});
+
+// TODO(crbug.com/1144908): Consider extracting metadata into helper library
+// shared with webcodecs tests. This metadata is adapted from webcodecs/video-decoder-any.js.
+let vp9 = {
+ async buffer() { return (await fetch('vp9.mp4')).arrayBuffer(); },
+ // Note: the file might not actually be level 1. See the original metadata in the webcodecs test suite.
+ codec: "vp09.00.10.08",
+ frames: [{offset: 44, size: 3315, type: 'key'},
+ {offset: 3359, size: 203, type: 'delta'},
+ {offset: 3562, size: 245, type: 'delta'},
+ {offset: 3807, size: 172, type: 'delta'},
+ {offset: 3979, size: 312, type: 'delta'},
+ {offset: 4291, size: 170, type: 'delta'},
+ {offset: 4461, size: 195, type: 'delta'},
+ {offset: 4656, size: 181, type: 'delta'},
+ {offset: 4837, size: 356, type: 'delta'},
+ {offset: 5193, size: 159, type: 'delta'}]
+};
+
+async function getOpenMediaSource(t) {
+ return new Promise(async resolve => {
+ const v = document.createElement('video');
+ document.body.appendChild(v);
+ const mediaSource = new MediaSource();
+ const url = URL.createObjectURL(mediaSource);
+ mediaSource.addEventListener('sourceopen', t.step_func(() => {
+ URL.revokeObjectURL(url);
+ assert_equals(mediaSource.readyState, 'open', 'MediaSource is open');
+ resolve([ v, mediaSource ]);
+ }), { once: true });
+ v.src = url;
+ });
+}
+
+promise_test(async t => {
+ let buffer = await vp9.buffer();
+ let [ videoElement, mediaSource ] = await getOpenMediaSource(t);
+ videoElement.controls = true; // Makes early prototype demo playback easier to control manually.
+ let sourceBuffer = mediaSource.addSourceBuffer({ videoConfig: { codec: vp9.codec } });
+ let next_timestamp = 0;
+ let frame_duration = 100 * 1000; // 100 milliseconds
+ // forEach with async callbacks makes it too easy to have uncaught rejections
+ // that don't fail this promise_test or even emit a harness error, so iterate
+ // explicitly instead (see the sketch following this file).
+ for (let i = 0; i < vp9.frames.length; i++, next_timestamp += frame_duration) {
+ let frame_metadata = vp9.frames[i];
+ await sourceBuffer.appendEncodedChunks(new EncodedVideoChunk( {
+ type: frame_metadata.type,
+ timestamp: next_timestamp,
+ duration: frame_duration,
+ data: new Uint8Array(buffer, frame_metadata.offset, frame_metadata.size)
+ }));
+ }
+
+ mediaSource.endOfStream();
+
+ return new Promise( (resolve, reject) => {
+ videoElement.onended = resolve;
+ videoElement.onerror = reject;
+ videoElement.play();
+ });
+
+}, "Buffer EncodedVideoChunks (VP9) one-by-one and play them with MSE");
+
+// TODO(crbug.com/1144908): More exhaustive tests (multiple sourcebuffers,
+// varying append patterns, invalid append patterns; eventually more codecs,
+// out-of-order DTS, durations, etc.)
+
+</script>
+</html>
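The comment in the test above about forEach and async callbacks points at a general JavaScript pitfall: Array.prototype.forEach neither awaits nor observes the promises its callback returns, so a rejected append can go unnoticed and the promise_test can resolve before the appends finish. A minimal sketch of the contrast, assumed to run inside an async test body; frames, sourceBuffer and the toChunk() helper are hypothetical stand-ins for the vp9 metadata, SourceBuffer and EncodedVideoChunk construction used above.

    // toChunk() is a hypothetical helper wrapping one frame's bytes in an EncodedVideoChunk.

    // Problematic: forEach discards the returned promises, so the appends are
    // not serialized and a rejected append never fails the surrounding test.
    frames.forEach(async frame => {
      await sourceBuffer.appendEncodedChunks(toChunk(frame));  // rejection goes unobserved
    });

    // What the test above does instead: iterate explicitly so each append is
    // awaited in order and any rejection propagates to the promise_test.
    for (const frame of frames) {
      await sourceBuffer.appendEncodedChunks(toChunk(frame));
    }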
diff --git a/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/vp9.mp4 b/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/vp9.mp4
new file mode 100644
index 0000000000..7553e5cae9
--- /dev/null
+++ b/testing/web-platform/tests/media-source/mse-for-webcodecs/tentative/vp9.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/invalid-codec.webm b/testing/web-platform/tests/media-source/webm/invalid-codec.webm
new file mode 100644
index 0000000000..f1c8bdd7ab
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/invalid-codec.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-a-128k-44100Hz-1ch-manifest.json b/testing/web-platform/tests/media-source/webm/test-a-128k-44100Hz-1ch-manifest.json
new file mode 100644
index 0000000000..524da8149f
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-a-128k-44100Hz-1ch-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-a-128k-44100Hz-1ch.webm",
+ "type": "audio/webm;codecs=\"vorbis\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-a-128k-44100Hz-1ch.webm b/testing/web-platform/tests/media-source/webm/test-a-128k-44100Hz-1ch.webm
new file mode 100644
index 0000000000..c5b064deb9
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-a-128k-44100Hz-1ch.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-a-192k-44100Hz-1ch-manifest.json b/testing/web-platform/tests/media-source/webm/test-a-192k-44100Hz-1ch-manifest.json
new file mode 100644
index 0000000000..7f2fa1e8c3
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-a-192k-44100Hz-1ch-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-a-192k-44100Hz-1ch.webm",
+ "type": "audio/webm;codecs=\"vorbis\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-a-192k-44100Hz-1ch.webm b/testing/web-platform/tests/media-source/webm/test-a-192k-44100Hz-1ch.webm
new file mode 100644
index 0000000000..53814d3bd6
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-a-192k-44100Hz-1ch.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..af9f07af15
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm",
+ "type": "video/webm;codecs=\"vp8,vorbis\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm b/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm
new file mode 100644
index 0000000000..8b705dbc89
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..f7ec86b3db
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.webm",
+ "type": "video/webm;codecs=\"vp8,vorbis\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.webm b/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.webm
new file mode 100644
index 0000000000..c5e010e3c7
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-av-384k-44100Hz-1ch-640x480-30fps-10kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..96a59db586
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.webm",
+ "type": "video/webm;codecs=\"vp8,vorbis\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.webm b/testing/web-platform/tests/media-source/webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.webm
new file mode 100644
index 0000000000..62c43288e6
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-av-448k-44100Hz-1ch-640x480-30fps-10kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..86723b34a8
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.webm",
+ "type": "video/webm;codecs=\"vp8,vorbis\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.webm b/testing/web-platform/tests/media-source/webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.webm
new file mode 100644
index 0000000000..93c31b6a97
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-av-640k-44100Hz-1ch-640x480-30fps-10kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-24fps-8kfr-manifest.json b/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-24fps-8kfr-manifest.json
new file mode 100644
index 0000000000..00e103aca9
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-24fps-8kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-v-128k-320x240-24fps-8kfr.webm",
+ "type": "video/webm;codecs=\"vp8\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-24fps-8kfr.webm b/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-24fps-8kfr.webm
new file mode 100644
index 0000000000..189c472f99
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-24fps-8kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..fdeeb401d9
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-v-128k-320x240-30fps-10kfr.webm",
+ "type": "video/webm;codecs=\"vp8\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-30fps-10kfr.webm b/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-30fps-10kfr.webm
new file mode 100644
index 0000000000..18b2bafc3a
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-v-128k-320x240-30fps-10kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-v-128k-640x480-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/webm/test-v-128k-640x480-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..4e30460667
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-v-128k-640x480-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-v-128k-640x480-30fps-10kfr.webm",
+ "type": "video/webm;codecs=\"vp8\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-v-128k-640x480-30fps-10kfr.webm b/testing/web-platform/tests/media-source/webm/test-v-128k-640x480-30fps-10kfr.webm
new file mode 100644
index 0000000000..75e38b0bfa
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-v-128k-640x480-30fps-10kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-v-256k-320x240-30fps-10kfr-manifest.json b/testing/web-platform/tests/media-source/webm/test-v-256k-320x240-30fps-10kfr-manifest.json
new file mode 100644
index 0000000000..3470674bff
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-v-256k-320x240-30fps-10kfr-manifest.json
@@ -0,0 +1,4 @@
+{
+ "url": "webm/test-v-256k-320x240-30fps-10kfr.webm",
+ "type": "video/webm;codecs=\"vp8\""
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/media-source/webm/test-v-256k-320x240-30fps-10kfr.webm b/testing/web-platform/tests/media-source/webm/test-v-256k-320x240-30fps-10kfr.webm
new file mode 100644
index 0000000000..0250d26faf
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-v-256k-320x240-30fps-10kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-vp8-vorbis-webvtt.webm b/testing/web-platform/tests/media-source/webm/test-vp8-vorbis-webvtt.webm
new file mode 100644
index 0000000000..c626f86e33
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-vp8-vorbis-webvtt.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test-vp9.webm b/testing/web-platform/tests/media-source/webm/test-vp9.webm
new file mode 100644
index 0000000000..d63dfdaac7
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test-vp9.webm
Binary files differ
diff --git a/testing/web-platform/tests/media-source/webm/test.webm b/testing/web-platform/tests/media-source/webm/test.webm
new file mode 100644
index 0000000000..3a601805d7
--- /dev/null
+++ b/testing/web-platform/tests/media-source/webm/test.webm
Binary files differ
diff --git a/testing/web-platform/tests/media/1x1-green.png b/testing/web-platform/tests/media/1x1-green.png
new file mode 100644
index 0000000000..b98ca0ba0a
--- /dev/null
+++ b/testing/web-platform/tests/media/1x1-green.png
Binary files differ
diff --git a/testing/web-platform/tests/media/2048x1360-random.jpg b/testing/web-platform/tests/media/2048x1360-random.jpg
new file mode 100644
index 0000000000..e985d5c903
--- /dev/null
+++ b/testing/web-platform/tests/media/2048x1360-random.jpg
Binary files differ
diff --git a/testing/web-platform/tests/media/2x2-green.mp4 b/testing/web-platform/tests/media/2x2-green.mp4
new file mode 100644
index 0000000000..157361c2dd
--- /dev/null
+++ b/testing/web-platform/tests/media/2x2-green.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/2x2-green.ogv b/testing/web-platform/tests/media/2x2-green.ogv
new file mode 100644
index 0000000000..29903c0a81
--- /dev/null
+++ b/testing/web-platform/tests/media/2x2-green.ogv
Binary files differ
diff --git a/testing/web-platform/tests/media/400x300-red-resize-200x150-green.mp4 b/testing/web-platform/tests/media/400x300-red-resize-200x150-green.mp4
new file mode 100644
index 0000000000..1058c1bf26
--- /dev/null
+++ b/testing/web-platform/tests/media/400x300-red-resize-200x150-green.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/400x300-red-resize-200x150-green.webm b/testing/web-platform/tests/media/400x300-red-resize-200x150-green.webm
new file mode 100644
index 0000000000..53b6517849
--- /dev/null
+++ b/testing/web-platform/tests/media/400x300-red-resize-200x150-green.webm
Binary files differ
diff --git a/testing/web-platform/tests/media/A4.mp4 b/testing/web-platform/tests/media/A4.mp4
new file mode 100644
index 0000000000..ef66d43b9e
--- /dev/null
+++ b/testing/web-platform/tests/media/A4.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/A4.ogv b/testing/web-platform/tests/media/A4.ogv
new file mode 100644
index 0000000000..de99616ece
--- /dev/null
+++ b/testing/web-platform/tests/media/A4.ogv
Binary files differ
diff --git a/testing/web-platform/tests/media/META.yml b/testing/web-platform/tests/media/META.yml
new file mode 100644
index 0000000000..69172f5b39
--- /dev/null
+++ b/testing/web-platform/tests/media/META.yml
@@ -0,0 +1,2 @@
+suggested_reviewers:
+ - hillbrad
diff --git a/testing/web-platform/tests/media/counting.mp4 b/testing/web-platform/tests/media/counting.mp4
new file mode 100644
index 0000000000..5fbd6d97f3
--- /dev/null
+++ b/testing/web-platform/tests/media/counting.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/counting.ogv b/testing/web-platform/tests/media/counting.ogv
new file mode 100644
index 0000000000..ce03c19e50
--- /dev/null
+++ b/testing/web-platform/tests/media/counting.ogv
Binary files differ
diff --git a/testing/web-platform/tests/media/foo-no-cors.vtt b/testing/web-platform/tests/media/foo-no-cors.vtt
new file mode 100644
index 0000000000..b533895c60
--- /dev/null
+++ b/testing/web-platform/tests/media/foo-no-cors.vtt
@@ -0,0 +1,4 @@
+WEBVTT
+
+00:00:00.000 --> 00:00:05.000
+Foo
diff --git a/testing/web-platform/tests/media/foo.vtt b/testing/web-platform/tests/media/foo.vtt
new file mode 100644
index 0000000000..b533895c60
--- /dev/null
+++ b/testing/web-platform/tests/media/foo.vtt
@@ -0,0 +1,4 @@
+WEBVTT
+
+00:00:00.000 --> 00:00:05.000
+Foo
diff --git a/testing/web-platform/tests/media/foo.vtt.headers b/testing/web-platform/tests/media/foo.vtt.headers
new file mode 100644
index 0000000000..23de552c1a
--- /dev/null
+++ b/testing/web-platform/tests/media/foo.vtt.headers
@@ -0,0 +1 @@
+Access-Control-Allow-Origin: * \ No newline at end of file
diff --git a/testing/web-platform/tests/media/green-at-15.mp4 b/testing/web-platform/tests/media/green-at-15.mp4
new file mode 100644
index 0000000000..a9d2b979d4
--- /dev/null
+++ b/testing/web-platform/tests/media/green-at-15.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/green-at-15.ogv b/testing/web-platform/tests/media/green-at-15.ogv
new file mode 100644
index 0000000000..50d59dfb38
--- /dev/null
+++ b/testing/web-platform/tests/media/green-at-15.ogv
Binary files differ
diff --git a/testing/web-platform/tests/media/movie_300.mp4 b/testing/web-platform/tests/media/movie_300.mp4
new file mode 100644
index 0000000000..cf59777fb9
--- /dev/null
+++ b/testing/web-platform/tests/media/movie_300.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/movie_300.ogv b/testing/web-platform/tests/media/movie_300.ogv
new file mode 100644
index 0000000000..0f83996e5d
--- /dev/null
+++ b/testing/web-platform/tests/media/movie_300.ogv
Binary files differ
diff --git a/testing/web-platform/tests/media/movie_5.mp4 b/testing/web-platform/tests/media/movie_5.mp4
new file mode 100644
index 0000000000..fb5dbca238
--- /dev/null
+++ b/testing/web-platform/tests/media/movie_5.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/movie_5.ogv b/testing/web-platform/tests/media/movie_5.ogv
new file mode 100644
index 0000000000..e8990d1120
--- /dev/null
+++ b/testing/web-platform/tests/media/movie_5.ogv
Binary files differ
diff --git a/testing/web-platform/tests/media/poster.png b/testing/web-platform/tests/media/poster.png
new file mode 100644
index 0000000000..bf6e253b3d
--- /dev/null
+++ b/testing/web-platform/tests/media/poster.png
Binary files differ
diff --git a/testing/web-platform/tests/media/sine440.mp3 b/testing/web-platform/tests/media/sine440.mp3
new file mode 100644
index 0000000000..4ded8b3380
--- /dev/null
+++ b/testing/web-platform/tests/media/sine440.mp3
Binary files differ
diff --git a/testing/web-platform/tests/media/sound_0.mp3 b/testing/web-platform/tests/media/sound_0.mp3
new file mode 100644
index 0000000000..a15d1de328
--- /dev/null
+++ b/testing/web-platform/tests/media/sound_0.mp3
Binary files differ
diff --git a/testing/web-platform/tests/media/sound_0.oga b/testing/web-platform/tests/media/sound_0.oga
new file mode 100644
index 0000000000..c6f5fcb82d
--- /dev/null
+++ b/testing/web-platform/tests/media/sound_0.oga
Binary files differ
diff --git a/testing/web-platform/tests/media/sound_5.mp3 b/testing/web-platform/tests/media/sound_5.mp3
new file mode 100644
index 0000000000..bd20291989
--- /dev/null
+++ b/testing/web-platform/tests/media/sound_5.mp3
Binary files differ
diff --git a/testing/web-platform/tests/media/sound_5.oga b/testing/web-platform/tests/media/sound_5.oga
new file mode 100644
index 0000000000..239ad2bd08
--- /dev/null
+++ b/testing/web-platform/tests/media/sound_5.oga
Binary files differ
diff --git a/testing/web-platform/tests/media/test-1s.mp4 b/testing/web-platform/tests/media/test-1s.mp4
new file mode 100644
index 0000000000..76d6820007
--- /dev/null
+++ b/testing/web-platform/tests/media/test-1s.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/test-1s.webm b/testing/web-platform/tests/media/test-1s.webm
new file mode 100644
index 0000000000..772e264ff9
--- /dev/null
+++ b/testing/web-platform/tests/media/test-1s.webm
Binary files differ
diff --git a/testing/web-platform/tests/media/test-a-128k-44100Hz-1ch.webm b/testing/web-platform/tests/media/test-a-128k-44100Hz-1ch.webm
new file mode 100644
index 0000000000..c5b064deb9
--- /dev/null
+++ b/testing/web-platform/tests/media/test-a-128k-44100Hz-1ch.webm
Binary files differ
diff --git a/testing/web-platform/tests/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm b/testing/web-platform/tests/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm
new file mode 100644
index 0000000000..8b705dbc89
--- /dev/null
+++ b/testing/web-platform/tests/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media/test-v-128k-320x240-24fps-8kfr.webm b/testing/web-platform/tests/media/test-v-128k-320x240-24fps-8kfr.webm
new file mode 100644
index 0000000000..189c472f99
--- /dev/null
+++ b/testing/web-platform/tests/media/test-v-128k-320x240-24fps-8kfr.webm
Binary files differ
diff --git a/testing/web-platform/tests/media/test.mp4 b/testing/web-platform/tests/media/test.mp4
new file mode 100644
index 0000000000..d278c8ad8c
--- /dev/null
+++ b/testing/web-platform/tests/media/test.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/test.ogv b/testing/web-platform/tests/media/test.ogv
new file mode 100644
index 0000000000..0c55f6c722
--- /dev/null
+++ b/testing/web-platform/tests/media/test.ogv
Binary files differ
diff --git a/testing/web-platform/tests/media/video.ogv b/testing/web-platform/tests/media/video.ogv
new file mode 100644
index 0000000000..5cb5f87848
--- /dev/null
+++ b/testing/web-platform/tests/media/video.ogv
Binary files differ
diff --git a/testing/web-platform/tests/media/white.mp4 b/testing/web-platform/tests/media/white.mp4
new file mode 100644
index 0000000000..ef609e4281
--- /dev/null
+++ b/testing/web-platform/tests/media/white.mp4
Binary files differ
diff --git a/testing/web-platform/tests/media/white.webm b/testing/web-platform/tests/media/white.webm
new file mode 100644
index 0000000000..bbacad7ffd
--- /dev/null
+++ b/testing/web-platform/tests/media/white.webm
Binary files differ
diff --git a/testing/web-platform/tests/mediacapture-extensions/GUM-backgroundBlur.https.html b/testing/web-platform/tests/mediacapture-extensions/GUM-backgroundBlur.https.html
new file mode 100644
index 0000000000..605a4e0831
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-extensions/GUM-backgroundBlur.https.html
@@ -0,0 +1,150 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>Test background blur support</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-extensions/">
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks background blur support.</p>
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+"use strict";
+
+const constraintSet = {
+ backgroundBlur: true
+};
+
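+// For each constrainable property above, run four subtests covering
+// getSupportedConstraints(), getCapabilities(), getSettings() and
+// applyConstraints().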
+Object.keys(constraintSet).forEach(property => {
+ test(t => {
+ const supportedConstraints =
+ navigator.mediaDevices.getSupportedConstraints();
+ assert_implements_optional(
+ supportedConstraints[property],
+ `Optional property ${property} not in supported constraints`);
+ }, `Test getSupportedConstraints().${property}`);
+
+ promise_test(async t => {
+ const supportedConstraints =
+ navigator.mediaDevices.getSupportedConstraints();
+
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+ const [videoTrack] = stream.getVideoTracks();
+
+ assert_equals(typeof videoTrack.getCapabilities, 'function');
+ const capabilities = videoTrack.getCapabilities();
+ assert_equals(typeof capabilities, 'object');
+
+ if (!supportedConstraints[property]) {
+ assert_false(property in capabilities);
+ }
+
+ assert_implements_optional(
+ property in capabilities,
+ `Optional property ${property} not in capabilities`);
+
+ // Accept [false], [false, true], [true] and [true, false].
+ assert_array_equals(
+ capabilities[property],
+ capabilities[property].length == 1
+ ? [!!capabilities[property][0]]
+ : [!!capabilities[property][0], !capabilities[property][0]]);
+ }, `Test getCapabilities().${property}`);
+
+ promise_test(async t => {
+ const supportedConstraints =
+ navigator.mediaDevices.getSupportedConstraints();
+
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+ const [videoTrack] = stream.getVideoTracks();
+
+ const capabilities = videoTrack.getCapabilities();
+
+ assert_equals(typeof videoTrack.getSettings, 'function');
+ const settings = videoTrack.getSettings();
+ assert_equals(typeof settings, 'object');
+
+ if (!supportedConstraints[property] || !(property in capabilities))
+ assert_false(property in settings);
+
+ assert_implements_optional(
+ property in capabilities,
+ `Optional property ${property} not in capabilities`);
+
+ assert_in_array(settings[property], capabilities[property]);
+ }, `Test getSettings().${property}`);
+
+ promise_test(async t => {
+ const supportedConstraints =
+ navigator.mediaDevices.getSupportedConstraints();
+
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+ const [videoTrack] = stream.getVideoTracks();
+
+ const capabilities = videoTrack.getCapabilities();
+ const constraints = {advanced: [{
+ [property]: constraintSet[property]
+ }]};
+ const oldSettings = videoTrack.getSettings();
+
+ if (supportedConstraints[property] && !(property in capabilities)) {
+      // The user agent supports |constraints| but |videoTrack| is not capable
+      // of applying them.
+ await videoTrack.applyConstraints(constraints).then(
+ () => {
+ assert_unreached('Unexpected success applying constraints');
+ },
+ error => {});
+ } else {
+      // The user agent does not support |constraints| and will ignore them, or
+      // the user agent supports |constraints| and |videoTrack| is capable of
+      // applying them.
+ await videoTrack.applyConstraints(constraints).then(
+ () => {},
+ error => {
+ assert_unreached(`Error applying constraints: ${error.message}`);
+ });
+ }
+
+ assert_equals(typeof videoTrack.getConstraints, 'function');
+ const appliedConstraints = videoTrack.getConstraints();
+ assert_equals(typeof appliedConstraints, 'object');
+ const newSettings = videoTrack.getSettings();
+
+ if (!supportedConstraints[property] || !(property in capabilities)) {
+      // The user agent does not support |constraints| and ignored them, or
+      // the user agent supports |constraints| but |videoTrack| was not capable
+      // of applying them.
+ assert_object_equals(appliedConstraints, {});
+ } else {
+      // The user agent supports |constraints| and |videoTrack| was capable of
+      // applying them.
+ assert_object_equals(appliedConstraints, constraints);
+ }
+
+ if (!supportedConstraints[property] || !(property in capabilities) ||
+ !capabilities[property].includes(constraintSet[property])) {
+      // The user agent does not support |constraints| and ignored them, or
+      // the user agent supports |constraints| but |videoTrack| was not capable
+      // of applying them, or the user agent supports |constraints| and
+      // |videoTrack| was capable of applying them but could not satisfy the
+      // advanced constraints and skipped them.
+ assert_object_equals(newSettings, oldSettings);
+ } else {
+      // The user agent supports |constraints| and |videoTrack| was capable of
+      // applying them and could satisfy the advanced constraints.
+ assert_equals(newSettings[property], constraintSet[property]);
+ }
+ }, `Test applyConstraints() with ${property}`);
+});
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-fromelement/HTMLCanvasElement-getImageData-noframe.html b/testing/web-platform/tests/mediacapture-fromelement/HTMLCanvasElement-getImageData-noframe.html
new file mode 100644
index 0000000000..48d445f897
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-fromelement/HTMLCanvasElement-getImageData-noframe.html
@@ -0,0 +1,30 @@
+<!DOCTYPE html>
+<html>
+<body>
+<canvas id="canvas"></canvas>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+promise_test(async t => {
+ const canvas = document.getElementById("canvas");
+ const ctx = canvas.getContext("2d");
+ const stream = canvas.captureStream();
+ t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
+
+ const video = document.createElement("video");
+ video.srcObject = stream;
+
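+  // Reading pixels back must not count as painting to the canvas, so no frame
+  // should be delivered to the captured stream.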
+ ctx.getImageData(0, 0, canvas.width, canvas.height);
+
+ assert_equals(video.readyState, video.HAVE_NOTHING,
+ "Video element was just created");
+
+  // Wait a bit so the video element can update readyState in case a frame arrives.
+ await new Promise(r => t.step_timeout(r, 100));
+
+ assert_equals(video.readyState, video.HAVE_NOTHING,
+ "Video element did not get a frame from the canvas");
+}, "CanvasRenderingContext2D.getImageData() does not lead to a frame being captured");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-fromelement/META.yml b/testing/web-platform/tests/mediacapture-fromelement/META.yml
new file mode 100644
index 0000000000..17195009fa
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-fromelement/META.yml
@@ -0,0 +1,5 @@
+spec: https://w3c.github.io/mediacapture-fromelement/
+suggested_reviewers:
+ - yellowdoge
+ - martinthomson
+ - uysalere
diff --git a/testing/web-platform/tests/mediacapture-fromelement/capture.html b/testing/web-platform/tests/mediacapture-fromelement/capture.html
new file mode 100644
index 0000000000..74858737f1
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-fromelement/capture.html
@@ -0,0 +1,38 @@
+<!DOCTYPE html>
+<html>
+<head>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+</head>
+<body>
+<script>
+
+// Run captureStream() on different videos, and assert data is flowing.
+
+function makeAsyncTest(filename) {
+ promise_test(async test => {
+ const video = document.createElement('video');
+ video.src = "/media/" + filename;
+    video.onerror = test.unreached_func("<video> error");
+ video.play();
+
+ const stream = video.captureStream();
+
+ await new Promise(r => stream.onaddtrack = r);
+ const recorder = new MediaRecorder(stream);
+
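+    // A timeslice of 0 asks the recorder to deliver data in the smallest
+    // chunks it can, so dataavailable fires without waiting for stop().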
+ recorder.start(0);
+ const {data} = await new Promise(r => recorder.ondataavailable = r);
+ assert_true(data.size > 0, 'Recorded data size should be > 0');
+ }), "<video>.captureStream() and assert data flows.";
+}
+
+generate_tests(makeAsyncTest, [
+ [ "video-only", "test-v-128k-320x240-24fps-8kfr.webm" ],
+ [ "audio-only", "test-a-128k-44100Hz-1ch.webm" ],
+ [ "video+audio", "test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm" ]
+]);
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-fromelement/creation.html b/testing/web-platform/tests/mediacapture-fromelement/creation.html
new file mode 100644
index 0000000000..b025a3ad6c
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-fromelement/creation.html
@@ -0,0 +1,46 @@
+<!DOCTYPE html>
+<html>
+<head>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+</head>
+<body>
+<script>
+
+// Run captureStream() on <video>/<audio>s and inspect the generated Stream.
+
+var makeAsyncTest = function(filename, numTracks) {
+ async_test(function() {
+ var video = document.createElement('video');
+ video.src = "/media/" + filename;
+ video.onerror = this.unreached_func("<video> error");
+
+ assert_true('captureStream' in video);
+
+ var stream = video.captureStream();
+ assert_not_equals(stream, null, "error generating stream");
+
+ stream.onaddtrack = this.step_func_done(function() {
+ var tracks = stream.getTracks();
+ var idx;
+
+ for (idx = 0; idx < tracks.length; idx += 1) {
+ assert_equals(tracks[idx].readyState, 'live')
+ }
+
+ // The stream got a (number of) MediaStreamTracks added.
+ assert_equals(stream.getVideoTracks().length, numTracks['vid'], 'video');
+ assert_equals(stream.getAudioTracks().length, numTracks['aud'], 'audio');
+ });
+ }), "<video>.captureStream()";
+};
+
+generate_tests(makeAsyncTest, [
+ [ "video-only", "test-v-128k-320x240-24fps-8kfr.webm", {vid : 1, aud : 0} ],
+ [ "audio-only", "test-a-128k-44100Hz-1ch.webm", {vid : 0, aud : 1} ],
+ [ "video+audio", "test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm", {vid : 1, aud : 1} ]
+]);
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-fromelement/cross-origin.html b/testing/web-platform/tests/mediacapture-fromelement/cross-origin.html
new file mode 100644
index 0000000000..14d92c8fc8
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-fromelement/cross-origin.html
@@ -0,0 +1,41 @@
+<!DOCTYPE html>
+<html>
+<head>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+</head>
+<body>
+<video autoplay controls id="output"></video>
+<script>
+
+// Run captureStream() on cross-origin <video> content
+ async_test(function() {
+ const video = document.createElement('video');
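+    // The www1 subdomain serves the same resources from a different origin,
+    // so the <video> content is cross-origin for this document.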
+ video.src = location.origin.replace("//", "//www1.") + "/media/white.webm";
+ video.onerror = this.unreached_func("<video> error");
+ video.loop = true;
+ video.play();
+
+ const stream = video.captureStream();
+ assert_not_equals(stream, null, "error generating stream");
+ const output = document.getElementById("output");
+ const canvas = document.createElement("canvas");
+ const ctx = canvas.getContext('2d');
+
+ stream.onaddtrack = this.step_func_done(function() {
+ canvas.width = output.videoWidth || 320;
+ canvas.height = output.videoHeight || 240;
+ // The stream got a (number of) MediaStreamTracks added.
+ assert_equals(stream.getVideoTracks().length, 1, 'video tracks');
+ assert_equals(stream.getAudioTracks().length, 0, 'audio');
+ assert_true(stream.getVideoTracks()[0].muted, 'cross-origin video is muted');
+ ctx.drawImage(output, 0, 0, canvas.width, canvas.height);
+
+ const pixels = ctx.getImageData(0,0,canvas.width, canvas.height);
+ assert_equals(pixels.data[canvas.width*canvas.height*4 - 4], 0, "cross-origin content appears black");
+ }, "<video>.captureStream() returns muted/black stream");
+ }, "Capturing stream from cross-origin video");
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-fromelement/ended.html b/testing/web-platform/tests/mediacapture-fromelement/ended.html
new file mode 100644
index 0000000000..845fbcbaa6
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-fromelement/ended.html
@@ -0,0 +1,41 @@
+<!DOCTYPE html>
+<html>
+<head>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+</head>
+<body>
+<script>
+
+// Run captureStream() on different videos, and assert that the MediaStream is
+// ended when the source HTMLMediaElement finishes.
+
+var makeAsyncTest = function(filename) {
+ async_test(function(test) {
+ var video = document.createElement('video');
+ video.src = "/media/" + filename;
+ video.onerror = this.unreached_func("<video> error");
+ video.play();
+
+ assert_true('captureStream' in video);
+
+ var stream = video.captureStream();
+
+ stream.onremovetrack = this.step_func_done(function() {
+ assert_true(video.ended, 'video must be ended');
+ assert_equals(stream.getTracks().length, 0, 'stream must have no tracks');
+ assert_false(stream.active, 'stream must be inactive');
+ });
+
+ }), "<video>.captureStream() and assert ended event.";
+};
+
+generate_tests(makeAsyncTest, [
+ [ "video-only", "test-v-128k-320x240-24fps-8kfr.webm" ],
+ [ "audio-only", "test-a-128k-44100Hz-1ch.webm" ],
+ [ "video+audio", "test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm" ]
+]);
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-fromelement/historical.html b/testing/web-platform/tests/mediacapture-fromelement/historical.html
new file mode 100644
index 0000000000..3b86e9de35
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-fromelement/historical.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>Historical features</title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+ <script>
+ // https://lists.w3.org/Archives/Public/public-media-capture/2015Nov/0012.html
+ test(function() {
+ assert_false(MediaStream.prototype.hasOwnProperty('onactive'));
+ }, 'the deprecated MediaStream `onactive` event handler property is not available');
+ </script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-fromelement/idlharness.window.js b/testing/web-platform/tests/mediacapture-fromelement/idlharness.window.js
new file mode 100644
index 0000000000..970f3cd5c6
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-fromelement/idlharness.window.js
@@ -0,0 +1,38 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+
+// https://w3c.github.io/mediacapture-fromelement/
+
+'use strict';
+
+idl_test(
+ ['mediacapture-fromelement'],
+ ['mediacapture-streams', 'html', 'dom'],
+ idl_array => {
+ // Ignored errors will be surfaced when the elements are undefined below.
+ try {
+ self.video = document.createElement('video');
+ video.width = video.height = 10;
+ document.body.appendChild(video);
+ } catch (e) { }
+
+ try {
+ self.audio = document.createElement('audio');
+ document.body.appendChild(audio);
+ } catch (e) { }
+
+ try {
+ self.canvas = document.createElement('canvas');
+ document.body.appendChild(canvas);
+ canvas.width = canvas.height = 10;
+ self.track = canvas.captureStream().getTracks()[0];
+ } catch (e) { }
+
+ idl_array.add_objects({
+ HTMLVideoElement: ['video'],
+ HTMLAudioElement: ['audio'],
+ HTMLCanvasElement: ['canvas'],
+ CanvasCaptureMediaStreamTrack: ['track'],
+ });
+ }
+);
diff --git a/testing/web-platform/tests/mediacapture-handle/identity/MediaDevices-setCaptureHandleConfig.https.window.js b/testing/web-platform/tests/mediacapture-handle/identity/MediaDevices-setCaptureHandleConfig.https.window.js
new file mode 100644
index 0000000000..6fa01ad2b9
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-handle/identity/MediaDevices-setCaptureHandleConfig.https.window.js
@@ -0,0 +1,54 @@
+'use strict';
+
+test(() => {
+ assert_true(!!navigator.mediaDevices.setCaptureHandleConfig);
+ navigator.mediaDevices.setCaptureHandleConfig({handle: 'X'.repeat(1024)});
+}, 'setCaptureHandleConfig does not throw if handle length is 1024.');
+
+test(() => {
+ assert_true(!!navigator.mediaDevices.setCaptureHandleConfig);
+ assert_throws_js(
+ TypeError,
+ () => navigator.mediaDevices.setCaptureHandleConfig(
+ {handle: 'X'.repeat(1025)}),
+ 'handle length must be 1024 characters or less');
+}, 'setCaptureHandleConfig raises TypeError if handle is invalid.');
+
+test(() => {
+ assert_true(!!navigator.mediaDevices.setCaptureHandleConfig);
+ navigator.mediaDevices.setCaptureHandleConfig({permittedOrigins: ['*']});
+}, 'setCaptureHandleConfig does not throw if asterisk character appears only once.');
+
+test(() => {
+ assert_true(!!navigator.mediaDevices.setCaptureHandleConfig);
+ assert_throws_dom(
+ 'NotSupportedError',
+ () => navigator.mediaDevices.setCaptureHandleConfig(
+ {permittedOrigins: ['*', '*']}),
+ 'two asterisk characters are not allowed');
+
+ assert_throws_dom(
+ 'NotSupportedError',
+ () => navigator.mediaDevices.setCaptureHandleConfig(
+ {permittedOrigins: ['*', 'http://example.com']}),
+ 'asterisk character is not allowed with valid origins');
+
+ assert_throws_dom(
+ 'NotSupportedError',
+ () => navigator.mediaDevices.setCaptureHandleConfig(
+ {permittedOrigins: ['about://blank']}),
+ 'origins must be valid');
+}, 'setCaptureHandleConfig raises NotSupportedError if permittedOrigins is invalid.');
+
+
+test(() => {
+ assert_true(!!navigator.mediaDevices.setCaptureHandleConfig);
+ const iframe = document.createElement('iframe');
+ document.body.appendChild(iframe);
+ const mediaDevices = iframe.contentWindow.navigator.mediaDevices;
+ const iframeDOMException = iframe.contentWindow.DOMException;
+
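+  // Pass the iframe's DOMException constructor so the thrown error is checked
+  // against the iframe's realm.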
+ assert_throws_dom('InvalidStateError', iframeDOMException, () => {
+ mediaDevices.setCaptureHandleConfig();
+ });
+}, 'setCaptureHandleConfig raises InvalidStateError if not from top-level browsing context.');
diff --git a/testing/web-platform/tests/mediacapture-image/ImageCapture-MediaTrackSupportedConstraints.https.html b/testing/web-platform/tests/mediacapture-image/ImageCapture-MediaTrackSupportedConstraints.https.html
new file mode 100644
index 0000000000..fddeb60d0c
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/ImageCapture-MediaTrackSupportedConstraints.https.html
@@ -0,0 +1,29 @@
+<!DOCTYPE HTML>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+// Tests that getSupportedConstraints() returns what it should.
+test(function() {
+  const supported_constraints = navigator.mediaDevices.getSupportedConstraints();
+
+ assert_true(supported_constraints.whiteBalanceMode);
+ assert_true(supported_constraints.exposureMode);
+ assert_true(supported_constraints.focusMode);
+ assert_true(supported_constraints.pointsOfInterest);
+ assert_true(supported_constraints.exposureCompensation);
+ assert_true(supported_constraints.exposureTime);
+ assert_true(supported_constraints.colorTemperature);
+ assert_true(supported_constraints.iso);
+ assert_true(supported_constraints.brightness);
+ assert_true(supported_constraints.contrast);
+ assert_true(supported_constraints.saturation);
+ assert_true(supported_constraints.sharpness);
+ assert_true(supported_constraints.focusDistance);
+ assert_true(supported_constraints.pan);
+ assert_true(supported_constraints.tilt);
+ assert_true(supported_constraints.zoom);
+ assert_true(supported_constraints.torch);
+}, 'Image Capture supported constraints');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/ImageCapture-creation.https.html b/testing/web-platform/tests/mediacapture-image/ImageCapture-creation.https.html
new file mode 100644
index 0000000000..81a503893a
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/ImageCapture-creation.https.html
@@ -0,0 +1,100 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src='../mediacapture-streams/permission-helper.js'></script>
+<script>
+
+// This test verifies that ImageCapture can be created (or not) with different
+// Media Stream Track types (audio, video).
+
+function makeAsyncTest(modifyTrack, message) {
+ async_test(function(test) {
+
+ const gotStream = test.step_func(function(stream) {
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+
+ var videoTrack = stream.getVideoTracks()[0];
+ assert_equals(videoTrack.readyState, 'live');
+ assert_true(videoTrack.enabled);
+ assert_false(videoTrack.muted);
+
+ var capturer = new ImageCapture(videoTrack);
+ assert_equals(capturer.track, videoTrack);
+
+ modifyTrack(videoTrack);
+
+ promise_rejects_dom(test,
+ 'InvalidStateError',
+ capturer.grabFrame(),
+ 'Should throw InvalidStateError.')
+ .then(() => test.done());
+ });
+
+ const onError = test.unreached_func('Error creating MediaStream.');
+    // Both permissions are needed at some point, so ask for both at once.
+ setMediaPermission()
+ .then(() => navigator.mediaDevices.getUserMedia({video: true}))
+ .then(gotStream)
+ .catch(onError);
+
+ }, message);
+}
+
+var disableTrack = function(videoTrack) {
+ // grabFrame() is rejected if the associated video track is disabled.
+ videoTrack.enabled = false;
+};
+
+var stopTrack = function(videoTrack) {
+ // grabFrame() is rejected if the associated video track is ended.
+ videoTrack.stop();
+ assert_equals(videoTrack.readyState, 'ended');
+};
+
+// Create the rejection tests. Note that grabFrame() would also be rejected if
+// the video Track was muted but that's a read-only property of the Track.
+makeAsyncTest(disableTrack, 'grabFrame() of a disabled Track');
+makeAsyncTest(stopTrack, 'grabFrame() of an ended Track');
+
+
+var testAudio = async_test(function() {
+ navigator.mediaDevices.getUserMedia({audio:true})
+ .then(
+ this.step_func(function(stream) {
+ assert_equals(stream.getAudioTracks().length, 1);
+ assert_equals(stream.getVideoTracks().length, 0);
+ assert_throws_dom("NotSupportedError",
+ function() {
+ var capturer = new ImageCapture(stream.getAudioTracks()[0]);
+ },
+ 'an ImageCapturer can only be created from a video track');
+
+ this.done();
+ }))
+ .catch(
+ this.unreached_func('Error creating MediaStream.'));
+}, 'verifies that an ImageCapture cannot be created out of an Audio Track');
+
+var testParameter = test(function() {
+ const invalidParameters = [
+ "invalid",
+ null,
+ 123,
+ {},
+ "",
+ true
+ ];
+ assert_throws_js(TypeError,
+ function() { var capturer = new ImageCapture(); },
+ 'an ImageCapturer cannot be created with no parameter');
+ invalidParameters.map(parameter => {
+ assert_throws_js(TypeError,
+ function() { var capturer = new ImageCapture(parameter); },
+ `an ImageCapturer cannot be created with a ${parameter} parameter`);
+ });
+}, 'throw "TypeError" if parameter is not MediaStreamTrack.');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/ImageCapture-grabFrame-crash.html b/testing/web-platform/tests/mediacapture-image/ImageCapture-grabFrame-crash.html
new file mode 100644
index 0000000000..d4bf57f0fa
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/ImageCapture-grabFrame-crash.html
@@ -0,0 +1,11 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<title>Repro of https://crbug.com/1374294</title>
+<body></body>
+<script>
+ let iframe = document.createElement('iframe');
+ document.body.appendChild(iframe);
+ let generator = new iframe.contentWindow.MediaStreamTrackGenerator({kind: 'video'});
+ const capture = new ImageCapture(generator);
+ capture.grabFrame();
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/ImageCapture-grabFrame.html b/testing/web-platform/tests/mediacapture-image/ImageCapture-grabFrame.html
new file mode 100644
index 0000000000..bf5e9400a0
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/ImageCapture-grabFrame.html
@@ -0,0 +1,46 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<body>
+<canvas id='canvas0' width=10 height=10/>
+<canvas id='canvas1' width=10 height=10/>
+</body>
+<script>
+
+// This test verifies that ImageCapture can grabFrame()s.
+
+var test = async_test(function() {
+ var canvas0 = document.getElementById('canvas0');
+ var context0 = canvas0.getContext("2d");
+ context0.fillStyle = "red";
+ context0.fillRect(0, 0, 10, 10);
+
+ var stream = canvas0.captureStream();
+
+ var capturer = new ImageCapture(stream.getVideoTracks()[0]);
+
+ capturer.grabFrame()
+ .then(bitmap => {
+ assert_equals(canvas0.width, bitmap.width);
+ assert_equals(canvas0.height, bitmap.height);
+
+ var context1 = document.getElementById('canvas1').getContext("2d");
+ context1.drawImage(bitmap, 0, 0);
+
+ var imageData0 = context0.getImageData(0, 0, 10, 10);
+ var imageData1 = context1.getImageData(0, 0, 10, 10);
+
+ assert_equals(imageData0.width, imageData1.width);
+ assert_equals(imageData0.height, imageData1.height);
+ assert_equals(imageData0.data.length, imageData1.data.length);
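+      // Allow a small per-channel tolerance to absorb color conversion during
+      // capture and re-drawing of the frame.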
+ for (var i = 0; i < imageData0.data.length; i++)
+ assert_approx_equals(imageData0.data[i], imageData1.data[i], 5);
+
+ this.done();
+ })
+ .catch(error => {
+ assert_unreached('Error during grabFrame(): '+ error);
+ });
+}, 'exercises the ImageCapture API creation and grabFrame().');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/ImageCapture-track.html b/testing/web-platform/tests/mediacapture-image/ImageCapture-track.html
new file mode 100644
index 0000000000..774970ad0c
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/ImageCapture-track.html
@@ -0,0 +1,31 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<title>ImageCapture track</title>
+<link rel="author" title="Intel" href="http://www.intel.com">
+<link rel="help" href="https://w3c.github.io/mediacapture-image/#dom-imagecapture-track">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<canvas id='canvas' width=10 height=10></canvas>
+<script>
+
+test(t => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+
+ let stream = canvas.captureStream();
+ let videoTrack = stream.getVideoTracks()[0];
+
+ let capturer = new ImageCapture(videoTrack);
+ assert_true(capturer.track instanceof MediaStreamTrack);
+ assert_equals(capturer.track, videoTrack);
+
+ let cloneTrack = videoTrack.clone();
+ assert_not_equals(videoTrack, cloneTrack);
+
+ capturer.track = cloneTrack;
+ assert_equals(capturer.track, videoTrack);
+}, "ImageCapture track attribute is readonly")
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/META.yml b/testing/web-platform/tests/mediacapture-image/META.yml
new file mode 100644
index 0000000000..1b4fef5e5a
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/META.yml
@@ -0,0 +1,4 @@
+spec: https://w3c.github.io/mediacapture-image/
+suggested_reviewers:
+ - yellowdoge
+ - reillyeon
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-fast.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-fast.html
new file mode 100644
index 0000000000..2bc5999544
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-fast.html
@@ -0,0 +1,66 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<body>
+<canvas id='canvas' width=10 height=10/>
+</body>
+<script>
+
+// This test verifies that MediaStreamTrack.applyConstraints() exists and that,
+// when called with no parameters, returns a Promise that is resolved. This
+// might not make sense: https://github.com/w3c/mediacapture-main/issues/438 .
+// Other tests go deeper.
+promise_test(function(t) {
+ var canvas = document.getElementById('canvas');
+ var context = canvas.getContext("2d");
+ context.fillStyle = "red";
+ context.fillRect(0, 0, 10, 10);
+
+ var stream = canvas.captureStream();
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+
+ var videoTrack = stream.getVideoTracks()[0];
+ return videoTrack.applyConstraints();
+}, 'MediaStreamTrack.applyConstraints()');
+
+// This test verifies that MediaStreamTrack.applyConstraints() exists and that,
+// when called with an empty advanced constraint set, returns a Promise that is
+// resolved.
+promise_test(function(t) {
+  var canvas = document.getElementById('canvas');
+  var context = canvas.getContext("2d");
+  context.fillStyle = "red";
+  context.fillRect(0, 0, 10, 10);
+
+  var stream = canvas.captureStream();
+  assert_equals(stream.getAudioTracks().length, 0);
+  assert_equals(stream.getVideoTracks().length, 1);
+
+  var videoTrack = stream.getVideoTracks()[0];
+  return videoTrack.applyConstraints({advanced: []});
+}, 'MediaStreamTrack.applyConstraints({advanced: []})');
+
+// This test verifies that applyConstraints() rejects the returned Promise if
+// passed a non-supported image-capture constraint (https://crbug.com/711694).
+promise_test(async function(t) {
+ var canvas = document.getElementById('canvas');
+ var context = canvas.getContext("2d");
+ context.fillStyle = "red";
+ context.fillRect(0, 0, 10, 10);
+
+ var stream = canvas.captureStream();
+ var videoTrack = stream.getVideoTracks()[0];
+
+  // Use |torch| as an example of an unsupported constraint.
+ assert_false("torch" in videoTrack.getCapabilities());
+ try {
+ await videoTrack.applyConstraints({advanced : [ {torch : true} ]});
+ } catch (error) {
+ assert_equals(error.name, 'OverconstrainedError');
+ return;
+ }
+ assert_unreached('expected applyConstraints to reject');
+}, 'MediaStreamTrack.applyConstraints() with unsupported constraint');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-getSettings.https.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-getSettings.https.html
new file mode 100644
index 0000000000..1309d3bded
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-getSettings.https.html
@@ -0,0 +1,119 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<script>
+
+// This test verifies that the |constraints| configured in the mock Mojo
+// service implementation are returned by MediaStreamTrack.getSettings().
+
+image_capture_test(async t => {
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ 'granted');
+
+ const constraints = { advanced : [{ whiteBalanceMode : 'single-shot',
+ exposureMode : 'manual',
+ focusMode : 'single-shot',
+
+ pointsOfInterest : [{x : 0.1, y : 0.2},
+ {x : 0.3, y : 0.4}],
+
+ exposureCompensation : 133.77,
+ // in nano-seconds.
+ exposureTime : 10000,
+ colorTemperature : 6000,
+ iso : 120.0,
+
+ brightness : 3,
+ contrast : 4,
+ saturation : 5,
+ sharpness : 6,
+ focusDistance : 7,
+
+ pan : 8,
+ tilt : 9,
+ zoom : 3.141592,
+
+ torch : true
+ }]};
+
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let videoTrack = stream.getVideoTracks()[0];
+
+ try {
+ await videoTrack.applyConstraints(constraints);
+ } catch (error) {
+ assert_unreached('Error applying constraints: ' + error.message);
+ }
+
+ let settings = videoTrack.getSettings();
+ assert_equals(typeof settings, 'object');
+
+ assert_equals(constraints.advanced[0].whiteBalanceMode,
+ settings.whiteBalanceMode, 'whiteBalanceMode');
+ assert_equals(constraints.advanced[0].exposureMode, settings.exposureMode,
+ 'exposureMode');
+ assert_equals(constraints.advanced[0].focusMode, settings.focusMode,
+ 'focusMode');
+
+ assert_point2d_array_approx_equals(
+ constraints.advanced[0].pointsOfInterest, settings.pointsOfInterest,
+ 0.01);
+
+ assert_equals(constraints.advanced[0].exposureCompensation,
+ settings.exposureCompensation, 'exposureCompensation');
+ assert_equals(constraints.advanced[0].exposureTime,
+ settings.exposureTime, 'exposureTime');
+ assert_equals(constraints.advanced[0].colorTemperature,
+ settings.colorTemperature, 'colorTemperature');
+ assert_equals(constraints.advanced[0].iso, settings.iso, 'iso');
+
+ assert_equals(constraints.advanced[0].brightness, settings.brightness,
+ 'brightness');
+ assert_equals(constraints.advanced[0].contrast, settings.contrast,
+ 'contrast');
+ assert_equals(constraints.advanced[0].saturation, settings.saturation,
+ 'saturation');
+ assert_equals(constraints.advanced[0].sharpness, settings.sharpness,
+ 'sharpness');
+
+ assert_equals(constraints.advanced[0].focusDistance, settings.focusDistance,
+ 'focusDistance');
+
+ assert_equals(constraints.advanced[0].pan, settings.pan, 'pan');
+ assert_equals(constraints.advanced[0].tilt, settings.tilt, 'tilt');
+ assert_equals(constraints.advanced[0].zoom, settings.zoom, 'zoom');
+
+ assert_equals(constraints.advanced[0].torch, settings.torch, 'torch');
+
+}, 'exercises an applyConstraints() - getSettings() cycle with PTZ permission granted');
+
+
+// This test verifies that the PTZ |constraints| configured in the mock Mojo
+// service implementation can't be applied if PTZ permission is denied.
+
+image_capture_test(async t => {
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ 'denied');
+
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let videoTrack = stream.getVideoTracks()[0];
+
+ const constraints = [{ pan: 8 }, { tilt: 9 }];
+ await Promise.all(constraints.map(async constraint => {
+ try {
+ await videoTrack.applyConstraints({ advanced: [constraint] });
+ } catch (error) {
+ assert_equals(error.name, 'OverconstrainedError');
+ return;
+ }
+ assert_unreached(
+ "applyConstraints should throw a NotSupportedError for " +
+ JSON.stringify(constraint));
+ }));
+
+}, 'exercises an applyConstraints() with PTZ permission denied');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-reject.https.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-reject.https.html
new file mode 100644
index 0000000000..ac6216ae19
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints-reject.https.html
@@ -0,0 +1,78 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<script>
+
+// This test verifies that MediaStreamTrack.applyConstraints() rejects if any
+// passed constraint is unsupported or outside its allowed range.
+var makePromiseTest = function(getConstraint) {
+ image_capture_test(async (t, imageCaptureTest) => {
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ 'granted');
+
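+    // Make the mock report no torch support so that the torch constraint
+    // generator below is also rejected.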
+ imageCaptureTest.mockImageCapture().state().supportsTorch = false;
+
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let videoTrack = stream.getVideoTracks()[0];
+
+ try {
+ const constraints = {
+ advanced : [ getConstraint(imageCaptureTest.mockImageCapture().state()) ]
+ };
+
+ await videoTrack.applyConstraints(constraints);
+ assert_unreached('expected applyConstraints to reject');
+ } catch (error) {
+ assert_equals(error.name, 'OverconstrainedError');
+ }
+ });
+};
+
+const constraintGenerators = [
+ capabilities => ({ whiteBalanceMode: 'manual' }),
+ capabilities => ({ exposureMode: 'none' }),
+ capabilities => ({ focusMode: 'continuous' }),
+ capabilities => ({
+ exposureCompensation: capabilities.exposureCompensation.max + 1
+ }),
+ capabilities => ({
+ exposureCompensation: capabilities.exposureCompensation.min - 1
+ }),
+ capabilities => ({
+ colorTemperature: capabilities.colorTemperature.max + 1
+ }),
+ capabilities => ({
+ colorTemperature: capabilities.colorTemperature.min - 1
+ }),
+ capabilities => ({ iso: capabilities.iso.max + 1 }),
+ capabilities => ({ iso: capabilities.iso.min - 1 }),
+ capabilities => ({ brightness: capabilities.brightness.max + 1 }),
+ capabilities => ({ brightness: capabilities.brightness.min - 1 }),
+ capabilities => ({ contrast: capabilities.contrast.max + 1 }),
+ capabilities => ({ contrast: capabilities.contrast.min - 1 }),
+ capabilities => ({ saturation: capabilities.saturation.max + 1 }),
+ capabilities => ({ saturation: capabilities.saturation.min - 1 }),
+ capabilities => ({ sharpness: capabilities.sharpness.max + 1 }),
+ capabilities => ({ sharpness: capabilities.sharpness.min - 1 }),
+ capabilities => ({ pan: capabilities.pan.max + 1 }),
+ capabilities => ({ pan: capabilities.pan.min - 1 }),
+ capabilities => ({ tilt: capabilities.tilt.max + 1 }),
+ capabilities => ({ tilt: capabilities.tilt.min - 1 }),
+ capabilities => ({ zoom: capabilities.zoom.max + 1 }),
+ capabilities => ({ zoom: capabilities.zoom.min - 1 }),
+ capabilities => ({ torch: true }),
+];
+
+for (const key in constraintGenerators) {
+ generate_tests(
+ makePromiseTest, [[
+ 'MediaStreamTrack.applyConstraints(constraints) rejects with bad' +
+ ' constraints, #' + key,
+ constraintGenerators[key]
+ ]]);
+}
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints.https.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints.https.html
new file mode 100644
index 0000000000..bfbf04afdb
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-applyConstraints.https.html
@@ -0,0 +1,112 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<script>
+
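+// The mock service reports metering modes as enum indices; this table maps
+// them back to the string values used by the JavaScript API.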
+const meteringModeNames = ['none', 'manual', 'single-shot', 'continuous'];
+
+// This test verifies that we can call MediaStreamTrack.applyConstraints(), with
+// a mock Mojo service implementation.
+
+image_capture_test(async (t, imageCaptureTest) => {
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ 'granted');
+
+ const constraints = { advanced : [{ whiteBalanceMode : 'single-shot',
+ exposureMode : 'manual',
+ focusMode : 'single-shot',
+
+ pointsOfInterest : [{x : 0.1, y : 0.2},
+ {x : 0.3, y : 0.4}],
+
+ exposureCompensation : 133.77,
+ exposureTime : 10000,
+ colorTemperature : 6000,
+ iso : 120.0,
+
+ brightness : 3,
+ contrast : 4,
+ saturation : 5,
+ sharpness : 6,
+ focusDistance : 7,
+
+ pan : 8,
+ tilt : 9,
+ zoom : 3.141592,
+
+ torch : true
+ }]};
+
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let videoTrack = stream.getVideoTracks()[0];
+
+ try {
+ await videoTrack.applyConstraints(constraints);
+ } catch (error) {
+ assert_unreached('applyConstraints(): ' + error.message);
+ }
+
+ const constraintsDict = constraints.advanced[0];
+ let appliedConstraints = videoTrack.getConstraints();
+ const appliedConstraintsDict = appliedConstraints.advanced[0];
+
+ // Check that |appliedConstraints| and |constraints| are equal.
+  assert_equals(Object.keys(constraintsDict).length, Object.keys(appliedConstraintsDict).length);
+  Object.keys(appliedConstraintsDict).forEach(key => {
+ assert_not_equals(constraintsDict[key], undefined, 'key ' + key);
+ if (key != 'pointsOfInterest') {
+ assert_equals(constraintsDict[key], appliedConstraintsDict[key], key);
+ } else {
+ assert_point2d_array_approx_equals(constraintsDict[key],
+ appliedConstraintsDict[key], 0.01);
+ }
+ });
+
+ let theMock = imageCaptureTest.mockImageCapture();
+ assert_equals(constraintsDict.whiteBalanceMode,
+ meteringModeNames[theMock.options().whiteBalanceMode],
+ 'whiteBalanceMode');
+ assert_equals(constraintsDict.exposureMode,
+ meteringModeNames[theMock.options().exposureMode],
+ 'exposureMode');
+ assert_equals(constraintsDict.focusMode,
+ meteringModeNames[theMock.options().focusMode],
+ 'focusMode');
+
+ assert_point2d_array_approx_equals(constraintsDict.pointsOfInterest,
+ theMock.options().pointsOfInterest,
+ 0.01);
+
+ assert_equals(constraintsDict.exposureCompensation,
+ theMock.options().exposureCompensation,
+ 'exposureCompensation');
+ assert_equals(constraintsDict.exposureTime,
+ theMock.options().exposureTime,
+ 'exposureTime');
+ assert_equals(constraintsDict.colorTemperature,
+ theMock.options().colorTemperature, 'colorTemperature');
+ assert_equals(constraintsDict.iso, theMock.options().iso, 'iso');
+
+ assert_equals(constraintsDict.brightness, theMock.options().brightness,
+ 'brightness');
+ assert_equals(constraintsDict.contrast, theMock.options().contrast,
+                'contrast');
+ assert_equals(constraintsDict.saturation, theMock.options().saturation,
+ 'saturation');
+ assert_equals(constraintsDict.sharpness, theMock.options().sharpness,
+ 'sharpness');
+ assert_equals(constraintsDict.focusDistance, theMock.options().focusDistance
+ ,'focusDistance');
+
+ assert_equals(constraintsDict.pan, theMock.options().pan, 'pan');
+ assert_equals(constraintsDict.tilt, theMock.options().tilt, 'tilt');
+ assert_equals(constraintsDict.zoom, theMock.options().zoom, 'zoom');
+
+ assert_equals(constraintsDict.torch, theMock.options().torch, 'torch');
+
+}, 'exercises MediaStreamTrack.applyConstraints(constraints)');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-clone.https.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-clone.https.html
new file mode 100644
index 0000000000..3ee21031eb
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-clone.https.html
@@ -0,0 +1,362 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<script>
+
+// This test verifies that we can set some nondefault constraints, then clone a
+// MediaStreamTrack and check that the cloned constraints are the same as the
+// original, with a mock Mojo service implementation.
+image_capture_test(async (t, imageCaptureTest) => {
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ 'granted');
+
+ const constraints = { advanced : [{ whiteBalanceMode : 'single-shot',
+ exposureMode : 'manual',
+ focusMode : 'single-shot',
+
+ pointsOfInterest : [{x : 0.1, y : 0.2},
+ {x : 0.3, y : 0.4}],
+
+ exposureCompensation : 133.77,
+ exposureTime : 10000,
+ colorTemperature : 6000,
+ iso : 120.0,
+
+ brightness : 3,
+ contrast : 4,
+ saturation : 5,
+ sharpness : 6,
+ focusDistance : 7,
+
+ pan : 8,
+ tilt : 9,
+ zoom : 3.141592,
+
+ torch : true
+ }]};
+
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let originalVideoTrack = stream.getVideoTracks()[0];
+
+ await originalVideoTrack.applyConstraints(constraints);
+
+ let appliedConstraints = originalVideoTrack.getConstraints();
+
+ let clonedVideoTrack = originalVideoTrack.clone();
+ let appliedClonedConstraints = clonedVideoTrack.getConstraints();
+ assert_true('advanced' in appliedClonedConstraints);
+ assert_equals(appliedClonedConstraints.advanced.length, 1);
+ const appliedClonedAdvancedConstraints = appliedClonedConstraints.advanced[0];
+
+ // Check that |appliedClonedAdvancedConstraints| and |appliedAdvancedConstraints| are equal.
+ const appliedAdvancedConstraints = appliedConstraints.advanced[0];
+  assert_equals(Object.keys(appliedAdvancedConstraints).length, Object.keys(appliedClonedAdvancedConstraints).length);
+  Object.keys(appliedClonedAdvancedConstraints).forEach(key => {
+ assert_not_equals(appliedAdvancedConstraints[key], undefined, 'key ' + key);
+ if (key != 'pointsOfInterest') {
+ assert_equals(appliedAdvancedConstraints[key], appliedClonedAdvancedConstraints[key], key);
+ } else {
+ assert_point2d_array_approx_equals(appliedAdvancedConstraints[key],
+ appliedClonedAdvancedConstraints[key], 0.01);
+ }
+ });
+
+ assert_equals(appliedAdvancedConstraints.whiteBalanceMode,
+ appliedClonedAdvancedConstraints.whiteBalanceMode,
+ 'whiteBalanceMode');
+ assert_equals(appliedAdvancedConstraints.exposureMode,
+ appliedClonedAdvancedConstraints.exposureMode,
+ 'exposureMode');
+ assert_equals(appliedAdvancedConstraints.focusMode,
+ appliedClonedAdvancedConstraints.focusMode,
+ 'focusMode');
+
+ assert_point2d_array_approx_equals(
+ appliedAdvancedConstraints.pointsOfInterest,
+ appliedClonedAdvancedConstraints.pointsOfInterest,
+ 0.01);
+
+ assert_equals(appliedAdvancedConstraints.exposureCompensation,
+ appliedClonedAdvancedConstraints.exposureCompensation,
+ 'exposureCompensation');
+ assert_equals(appliedAdvancedConstraints.exposureTime,
+ appliedClonedAdvancedConstraints.exposureTime,
+ 'exposureTime');
+ assert_equals(appliedAdvancedConstraints.colorTemperature,
+ appliedClonedAdvancedConstraints.colorTemperature,
+ 'colorTemperature');
+ assert_equals(appliedAdvancedConstraints.iso,
+ appliedClonedAdvancedConstraints.iso,
+ 'iso');
+ assert_equals(appliedAdvancedConstraints.brightness,
+ appliedClonedAdvancedConstraints.brightness,
+ 'brightness');
+ assert_equals(appliedAdvancedConstraints.contrast,
+ appliedClonedAdvancedConstraints.contrast,
+                'contrast');
+ assert_equals(appliedAdvancedConstraints.saturation,
+ appliedClonedAdvancedConstraints.saturation,
+ 'saturation');
+ assert_equals(appliedAdvancedConstraints.sharpness,
+ appliedClonedAdvancedConstraints.sharpness,
+ 'sharpness');
+ assert_equals(appliedAdvancedConstraints.focusDistance,
+ appliedClonedAdvancedConstraints.focusDistance,
+ 'focusDistance');
+
+ assert_equals(appliedAdvancedConstraints.pan,
+ appliedClonedAdvancedConstraints.pan,
+ 'pan');
+ assert_equals(appliedAdvancedConstraints.tilt,
+ appliedClonedAdvancedConstraints.tilt,
+ 'tilt');
+ assert_equals(appliedAdvancedConstraints.zoom,
+ appliedClonedAdvancedConstraints.zoom,
+ 'zoom');
+
+ assert_equals(appliedAdvancedConstraints.torch,
+ appliedClonedAdvancedConstraints.torch,
+ 'torch');
+}, 'checks MediaStreamTrack.clone() gets same applied constraints');
+
+// This test verifies that MediaStreamTrack ImageCapture settings are copied
+// when cloning a MediaStreamTrack.
+image_capture_test(async (t, imageCaptureTest) => {
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ 'granted');
+
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let originalVideoTrack = stream.getVideoTracks()[0];
+ let clonedVideoTrack = originalVideoTrack.clone();
+
+ let settings = originalVideoTrack.getSettings();
+ let clonedSettings = clonedVideoTrack.getSettings();
+
+ assert_equals(settings.whiteBalanceMode,
+ clonedSettings.whiteBalanceMode,
+ 'whiteBalanceMode');
+ assert_equals(settings.exposureMode,
+ clonedSettings.exposureMode,
+                'exposureMode');
+ assert_equals(settings.focusMode,
+ clonedSettings.focusMode,
+ 'focusMode');
+
+ assert_point2d_array_approx_equals(
+ settings.pointsOfInterest,
+ clonedSettings.pointsOfInterest,
+ 0.01);
+
+ assert_equals(settings.exposureCompensation,
+ clonedSettings.exposureCompensation,
+ 'exposureCompensation');
+ assert_equals(settings.exposureTime,
+ clonedSettings.exposureTime,
+ 'exposureTime');
+ assert_equals(settings.colorTemperature,
+ clonedSettings.colorTemperature,
+ 'colorTemperature');
+ assert_equals(settings.iso,
+ clonedSettings.iso,
+ 'iso');
+
+ assert_equals(settings.brightness,
+ clonedSettings.brightness,
+ 'brightness');
+ assert_equals(settings.contrast,
+ clonedSettings.contrast,
+ 'contrast');
+ assert_equals(settings.saturation,
+ clonedSettings.saturation,
+ 'saturation');
+ assert_equals(settings.sharpness,
+ clonedSettings.sharpness,
+ 'sharpness');
+
+ assert_equals(settings.focusDistance,
+ clonedSettings.focusDistance,
+ 'focusDistance');
+
+ assert_equals(settings.pan,
+ clonedSettings.pan,
+ 'pan');
+ assert_equals(settings.tilt,
+ clonedSettings.tilt,
+ 'tilt');
+ assert_equals(settings.zoom,
+ clonedSettings.zoom,
+ 'zoom');
+
+ assert_equals(settings.torch,
+ clonedSettings.torch,
+ 'torch');
+}, 'checks MediaStreamTrack.clone() gets same settings');
+
+// This test verifies that MediaStreamTrack ImageCapture capabilities are copied
+// when cloning a MediaStreamTrack.
+image_capture_test(async (t, imageCaptureTest) => {
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ 'granted');
+
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let originalVideoTrack = stream.getVideoTracks()[0];
+ let clonedVideoTrack = originalVideoTrack.clone();
+
+ let capabilities = originalVideoTrack.getCapabilities();
+ let clonedCapabilities = clonedVideoTrack.getCapabilities();
+
+ assert_equals(capabilities.whiteBalanceMode.length,
+ clonedCapabilities.whiteBalanceMode.length,
+ 'whiteBalanceMode length');
+  for (let i = 0; i < capabilities.whiteBalanceMode.length; ++i) {
+ assert_equals(capabilities.whiteBalanceMode[i],
+ clonedCapabilities.whiteBalanceMode[i],
+ 'whiteBalanceMode');
+ }
+
+ assert_equals(capabilities.exposureMode.length,
+ clonedCapabilities.exposureMode.length,
+ 'exposureMode length');
+  for (let i = 0; i < capabilities.exposureMode.length; ++i) {
+ assert_equals(capabilities.exposureMode[i],
+ clonedCapabilities.exposureMode[i],
+ 'exposureMode');
+ }
+
+ assert_equals(capabilities.focusMode.length,
+ clonedCapabilities.focusMode.length,
+ 'focusMode length');
+  for (let i = 0; i < capabilities.focusMode.length; ++i) {
+ assert_equals(capabilities.focusMode[i],
+ clonedCapabilities.focusMode[i],
+ 'focusMode');
+ }
+
+ assert_equals(capabilities.exposureCompensation.max,
+ clonedCapabilities.exposureCompensation.max,
+ 'exposureCompensation max');
+ assert_equals(capabilities.exposureCompensation.min,
+ clonedCapabilities.exposureCompensation.min,
+ 'exposureCompensation min');
+ assert_equals(capabilities.exposureCompensation.step,
+ clonedCapabilities.exposureCompensation.step,
+ 'exposureCompensation step');
+
+ assert_equals(capabilities.exposureTime.max,
+ clonedCapabilities.exposureTime.max,
+ 'exposureTime max');
+ assert_equals(capabilities.exposureTime.min,
+ clonedCapabilities.exposureTime.min,
+ 'exposureTime min');
+ assert_equals(capabilities.exposureTime.step,
+ clonedCapabilities.exposureTime.step,
+ 'exposureTime step');
+
+ assert_equals(capabilities.colorTemperature.max,
+ clonedCapabilities.colorTemperature.max,
+ 'colorTemperature max');
+ assert_equals(capabilities.colorTemperature.min,
+ clonedCapabilities.colorTemperature.min,
+ 'colorTemperature min');
+ assert_equals(capabilities.colorTemperature.step,
+ clonedCapabilities.colorTemperature.step,
+ 'colorTemperature step');
+
+ assert_equals(capabilities.iso.max,
+ clonedCapabilities.iso.max,
+ 'iso max');
+ assert_equals(capabilities.iso.min,
+ clonedCapabilities.iso.min,
+ 'iso min');
+ assert_equals(capabilities.iso.step,
+ clonedCapabilities.iso.step,
+ 'iso step');
+
+ assert_equals(capabilities.brightness.max,
+ clonedCapabilities.brightness.max,
+ 'brightness max');
+ assert_equals(capabilities.brightness.min,
+ clonedCapabilities.brightness.min,
+ 'brightness min');
+ assert_equals(capabilities.brightness.step,
+ clonedCapabilities.brightness.step,
+ 'brightness step');
+
+ assert_equals(capabilities.contrast.max,
+ clonedCapabilities.contrast.max,
+ 'contrast max');
+ assert_equals(capabilities.contrast.min,
+ clonedCapabilities.contrast.min,
+ 'contrast min');
+ assert_equals(capabilities.contrast.step,
+ clonedCapabilities.contrast.step,
+ 'contrast step');
+
+ assert_equals(capabilities.saturation.max,
+ clonedCapabilities.saturation.max,
+ 'saturation max');
+ assert_equals(capabilities.saturation.min,
+ clonedCapabilities.saturation.min,
+ 'saturation min');
+ assert_equals(capabilities.saturation.step,
+ clonedCapabilities.saturation.step,
+ 'saturation step');
+
+ assert_equals(capabilities.sharpness.max,
+ clonedCapabilities.sharpness.max,
+ 'sharpness max');
+ assert_equals(capabilities.sharpness.min,
+ clonedCapabilities.sharpness.min,
+ 'sharpness min');
+ assert_equals(capabilities.sharpness.step,
+ clonedCapabilities.sharpness.step,
+ 'sharpness step');
+
+ assert_equals(capabilities.focusDistance.max,
+ clonedCapabilities.focusDistance.max,
+ 'focusDistance max');
+ assert_equals(capabilities.focusDistance.min,
+ clonedCapabilities.focusDistance.min,
+ 'focusDistance min');
+ assert_equals(capabilities.focusDistance.step,
+ clonedCapabilities.focusDistance.step,
+ 'focusDistance step');
+
+ assert_equals(capabilities.pan.max,
+ clonedCapabilities.pan.max,
+ 'pan max');
+ assert_equals(capabilities.pan.min,
+ clonedCapabilities.pan.min,
+ 'pan min');
+ assert_equals(capabilities.pan.step,
+ clonedCapabilities.pan.step,
+ 'pan step');
+
+ assert_equals(capabilities.tilt.max,
+ clonedCapabilities.tilt.max,
+ 'tilt max');
+ assert_equals(capabilities.tilt.min,
+ clonedCapabilities.tilt.min,
+ 'tilt min');
+ assert_equals(capabilities.tilt.step,
+ clonedCapabilities.tilt.step,
+ 'tilt step');
+
+ assert_equals(capabilities.zoom.max,
+ clonedCapabilities.zoom.max,
+ 'zoom max');
+ assert_equals(capabilities.zoom.min,
+ clonedCapabilities.zoom.min,
+ 'zoom min');
+ assert_equals(capabilities.zoom.step,
+ clonedCapabilities.zoom.step,
+ 'zoom step');
+
+ assert_equals(capabilities.torch,
+ clonedCapabilities.torch,
+ 'torch');
+}, 'checks MediaStreamTrack.clone() gets same capabilities');
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getCapabilities-fast.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getCapabilities-fast.html
new file mode 100644
index 0000000000..55272d1499
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getCapabilities-fast.html
@@ -0,0 +1,29 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<body>
+<canvas id='canvas' width=10 height=10/>
+</body>
+<script>
+
+// This test verifies that MediaStreamTrack.getCapabilities() exists and that it
+// returns something. Other tests go deeper.
+test(function() {
+ var canvas = document.getElementById('canvas');
+ var context = canvas.getContext("2d");
+ context.fillStyle = "red";
+ context.fillRect(0, 0, 10, 10);
+
+ var stream = canvas.captureStream();
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+
+ var videoTrack = stream.getVideoTracks()[0];
+
+ assert_equals(typeof videoTrack.getCapabilities, 'function');
+
+  var capabilities = videoTrack.getCapabilities();
+ assert_equals(typeof capabilities, 'object');
+}, 'MediaStreamTrack.getCapabilities()');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getCapabilities.https.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getCapabilities.https.html
new file mode 100644
index 0000000000..6a4835a475
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getCapabilities.https.html
@@ -0,0 +1,159 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<script>
+
+const meteringModeNames = ['none', 'manual', 'single-shot', 'continuous'];
+
+// This test verifies that MediaTrackCapabilities are returned upon
+// MediaStreamTrack.getCapabilities(), with a mock Mojo service implementation.
+// When PTZ permission is denied, however, PTZ capabilities are not available.
+
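+// For reference, a minimal sketch (not exercised by the test below) of how a
+// page might probe a camera's zoom range; as the test below checks, 'zoom' is
+// not exposed at all when PTZ permission is denied.
+async function exampleReadZoomRange() {
+  const stream = await navigator.mediaDevices.getUserMedia({video: true});
+  const capabilities = stream.getVideoTracks()[0].getCapabilities();
+  // capabilities.zoom, when present, is a {max, min, step} range.
+  return 'zoom' in capabilities ? capabilities.zoom : null;
+}
+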
+function makeImageCaptureTest(hasPanTiltZoomPermissionGranted) {
+ image_capture_test(async (t, imageCaptureTest) => {
+ const ptzPermission = hasPanTiltZoomPermissionGranted ? 'granted' : 'denied';
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ ptzPermission);
+
+ let mockCapabilities = imageCaptureTest.mockImageCapture().state();
+
+ // |stream| must be created _after_ |mock| is constructed to give the
+ // latter time to override the bindings.
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+
+ let videoTrack = stream.getVideoTracks()[0];
+ assert_equals(typeof videoTrack.getCapabilities, 'function');
+
+ let capabilities = videoTrack.getCapabilities();
+ assert_equals(typeof capabilities, 'object');
+
+ assert_equals(capabilities.whiteBalanceMode.length,
+ mockCapabilities.supportedWhiteBalanceModes.length,
+ 'whiteBalanceMode');
+    for (let i = 0; i < capabilities.whiteBalanceMode.length; ++i) {
+ assert_equals(
+ capabilities.whiteBalanceMode[i],
+ meteringModeNames[mockCapabilities
+ .supportedWhiteBalanceModes[i]],
+ 'whiteBalanceMode');
+ }
+
+ assert_equals(capabilities.exposureMode.length,
+ mockCapabilities.supportedExposureModes.length,
+ 'exposureMode');
+    for (let i = 0; i < capabilities.exposureMode.length; ++i) {
+ assert_equals(
+ capabilities.exposureMode[i],
+ meteringModeNames[mockCapabilities.supportedExposureModes[i]],
+ 'exposureMode');
+ }
+
+ assert_equals(capabilities.focusMode.length,
+ mockCapabilities.supportedFocusModes.length,
+ 'focusMode');
+    for (let i = 0; i < capabilities.focusMode.length; ++i) {
+ assert_equals(
+ capabilities.focusMode[i],
+ meteringModeNames[mockCapabilities.supportedFocusModes[i]],
+ 'focusMode');
+ }
+
+ assert_equals(capabilities.exposureCompensation.max,
+ mockCapabilities.exposureCompensation.max);
+ assert_equals(capabilities.exposureCompensation.min,
+ mockCapabilities.exposureCompensation.min);
+ assert_equals(capabilities.exposureCompensation.step,
+ mockCapabilities.exposureCompensation.step);
+
+ assert_equals(capabilities.exposureTime.max,
+ mockCapabilities.exposureTime.max);
+ assert_equals(capabilities.exposureTime.min,
+ mockCapabilities.exposureTime.min);
+ assert_equals(capabilities.exposureTime.step,
+ mockCapabilities.exposureTime.step);
+
+ assert_equals(capabilities.colorTemperature.max,
+ mockCapabilities.colorTemperature.max);
+ assert_equals(capabilities.colorTemperature.min,
+ mockCapabilities.colorTemperature.min);
+ assert_equals(capabilities.colorTemperature.step,
+ mockCapabilities.colorTemperature.step);
+
+ assert_equals(capabilities.iso.max, mockCapabilities.iso.max);
+ assert_equals(capabilities.iso.min, mockCapabilities.iso.min);
+ assert_equals(capabilities.iso.step, mockCapabilities.iso.step);
+
+ assert_equals(capabilities.brightness.max,
+ mockCapabilities.brightness.max);
+ assert_equals(capabilities.brightness.min,
+ mockCapabilities.brightness.min);
+ assert_equals(capabilities.brightness.step,
+ mockCapabilities.brightness.step);
+
+ assert_equals(capabilities.contrast.max,
+ mockCapabilities.contrast.max);
+ assert_equals(capabilities.contrast.min,
+ mockCapabilities.contrast.min);
+ assert_equals(capabilities.contrast.step,
+ mockCapabilities.contrast.step);
+
+ assert_equals(capabilities.saturation.max,
+ mockCapabilities.saturation.max);
+ assert_equals(capabilities.saturation.min,
+ mockCapabilities.saturation.min);
+ assert_equals(capabilities.saturation.step,
+ mockCapabilities.saturation.step);
+
+ assert_equals(capabilities.sharpness.max,
+ mockCapabilities.sharpness.max);
+ assert_equals(capabilities.sharpness.min,
+ mockCapabilities.sharpness.min);
+ assert_equals(capabilities.sharpness.step,
+ mockCapabilities.sharpness.step);
+
+ assert_equals(capabilities.focusDistance.max,
+ mockCapabilities.focusDistance.max);
+ assert_equals(capabilities.focusDistance.min,
+ mockCapabilities.focusDistance.min);
+ assert_equals(capabilities.focusDistance.step,
+ mockCapabilities.focusDistance.step);
+
+ if (ptzPermission === 'granted') {
+ assert_equals(capabilities.pan.max, mockCapabilities.pan.max);
+ assert_equals(capabilities.pan.min, mockCapabilities.pan.min);
+ assert_equals(capabilities.pan.step, mockCapabilities.pan.step);
+
+ assert_equals(capabilities.tilt.max, mockCapabilities.tilt.max);
+ assert_equals(capabilities.tilt.min, mockCapabilities.tilt.min);
+ assert_equals(capabilities.tilt.step, mockCapabilities.tilt.step);
+
+ assert_equals(capabilities.zoom.max, mockCapabilities.zoom.max);
+ assert_equals(capabilities.zoom.min, mockCapabilities.zoom.min);
+ assert_equals(capabilities.zoom.step, mockCapabilities.zoom.step);
+ } else if (ptzPermission === 'denied') {
+ assert_false('pan' in capabilities);
+ assert_false('tilt' in capabilities);
+ assert_false('zoom' in capabilities);
+ }
+
+ assert_equals(capabilities.torch, mockCapabilities.supportsTorch,
+ 'torch');
+ });
+}
+
+generate_tests(makeImageCaptureTest, [
+ [
+ "exercises MediaStreamTrack.getCapabilities() with PTZ permission denied",
+ false,
+ ],
+ [
+ "exercises MediaStreamTrack.getCapabilities() with PTZ permission granted",
+ true,
+ ],
+]);
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getConstraints.https.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getConstraints.https.html
new file mode 100644
index 0000000000..70cd2f2b07
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getConstraints.https.html
@@ -0,0 +1,63 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<script>
+
+const constraints = { whiteBalanceMode : "single-shot",
+ exposureMode : "manual",
+ focusMode : "single-shot",
+
+ exposureCompensation : 133.77,
+ exposureTime : 10000, // in nano-seconds.
+ colorTemperature : 6000,
+ iso : 120.0,
+
+ brightness : 3,
+ contrast : 4,
+ saturation : 5,
+ sharpness : 6,
+ focusDistance : 7,
+
+ pan : 8,
+ tilt : 9,
+ zoom : 3.141592
+ // TODO: torch https://crbug.com/700607.
+ };
+
+// These tests verify that MediaStreamTrack.getConstraints() exists and that it
+// returns the constraints passed beforehand with applyConstraints().
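+
+// A minimal sketch of that round trip (illustrative only; the zoom value is
+// arbitrary and this helper is not invoked by the tests below):
+async function exampleConstraintsRoundTrip(videoTrack) {
+  const requested = {advanced: [{zoom: 3}]};
+  await videoTrack.applyConstraints(requested);
+  // getConstraints() reports what was requested, not the resulting settings.
+  return videoTrack.getConstraints();  // Expected to deep-equal |requested|.
+}
+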
+function makePromiseTest(constraint) {
+ image_capture_test(async function(t) {
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ 'granted');
+
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let videoTrack = stream.getVideoTracks()[0];
+
+ let constraintsIn = {advanced : [ constraint ]};
+ await videoTrack.applyConstraints(constraintsIn);
+ assert_object_equals(videoTrack.getConstraints(), constraintsIn, "constraints");
+
+ // Clear constraints by sending an empty constraint set.
+ await videoTrack.applyConstraints({});
+ assert_object_equals(videoTrack.getConstraints(), {}, "constraints");
+ });
+};
+
+// Send each entry of |constraints| in turn and then the whole dictionary.
+for (const key in constraints) {
+ let one_constraint = {};
+ one_constraint[key] = constraints[key];
+ generate_tests(
+ makePromiseTest,
+ [[ 'MediaStreamTrack.getConstraints(), key: ' + key, one_constraint ]]);
+}
+
+generate_tests(makePromiseTest, [
+ ["MediaStreamTrack.getConstraints(), complete ", constraints],
+]);
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getSettings-fast.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getSettings-fast.html
new file mode 100644
index 0000000000..a4ecbe6118
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getSettings-fast.html
@@ -0,0 +1,29 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<body>
+<canvas id='canvas' width=10 height=10/>
+</body>
+<script>
+
+// This test verifies that MediaStreamTrack.getSettings() exists and that it
+// returns something. Other tests go deeper.
+test(function() {
+ var canvas = document.getElementById('canvas');
+ var context = canvas.getContext("2d");
+ context.fillStyle = "red";
+ context.fillRect(0, 0, 10, 10);
+
+ var stream = canvas.captureStream();
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+
+ var videoTrack = stream.getVideoTracks()[0];
+
+ assert_equals(typeof videoTrack.getSettings, 'function');
+
+  var settings = videoTrack.getSettings();
+ assert_equals(typeof settings, 'object');
+}, 'MediaStreamTrack.getSettings()');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getSettings.https.html b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getSettings.https.html
new file mode 100644
index 0000000000..bd8a1ea100
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/MediaStreamTrack-getSettings.https.html
@@ -0,0 +1,89 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<script>
+
+const meteringModeNames = ['none', 'manual', 'single-shot', 'continuous'];
+
+// This test verifies that the settings defined in the mock Mojo service
+// implementation are the same as those returned by the corresponding
+// MediaStreamTrack.getSettings(), except for PTZ settings when PTZ
+// permission is denied.
+
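+// For reference, a minimal sketch (not exercised by the test below) of how a
+// page might check a track's current setting against its capability range;
+// 'colorTemperature' is just one of the properties covered by this suite.
+function exampleIsColorTemperatureInRange(track) {
+  const range = track.getCapabilities().colorTemperature;
+  const current = track.getSettings().colorTemperature;
+  return range !== undefined && current !== undefined &&
+      current >= range.min && current <= range.max;
+}
+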
+function makeImageCaptureTest(hasPanTiltZoomPermissionGranted) {
+ image_capture_test(async (t, imageCaptureTest) => {
+ const ptzPermission = hasPanTiltZoomPermissionGranted ? 'granted' : 'denied';
+ await test_driver.set_permission({name: 'camera', panTiltZoom: true},
+ ptzPermission);
+
+ let mockSettings = imageCaptureTest.mockImageCapture().state();
+
+ // |stream| must be created _after_ |mock| is constructed to give the
+ // latter time to override the bindings.
+ let stream = await navigator.mediaDevices.getUserMedia({video: true});
+ let videoTrack = stream.getVideoTracks()[0];
+
+    // |videoTrack|'s settings retrieval, just like the actual capture, is a
+    // process kicked off right after creation, so we introduce a small delay
+    // to allow those settings to be collected.
+ await new Promise(resolve => step_timeout(resolve, 100));
+
+ let settings = videoTrack.getSettings();
+ assert_equals(typeof settings, 'object');
+
+ assert_equals(settings.whiteBalanceMode,
+ meteringModeNames[mockSettings.currentWhiteBalanceMode],
+ 'whiteBalanceMode');
+ assert_equals(settings.exposureMode,
+ meteringModeNames[mockSettings.currentExposureMode],
+                  'exposureMode');
+ assert_equals(settings.focusMode,
+ meteringModeNames[mockSettings.currentFocusMode],
+ 'focusMode');
+
+ assert_point2d_array_approx_equals(
+ settings.pointsOfInterest, mockSettings.pointsOfInterest, 0.01);
+
+ assert_equals(settings.exposureCompensation,
+ mockSettings.exposureCompensation.current);
+ assert_equals(settings.exposureTime,
+ mockSettings.exposureTime.current);
+ assert_equals(settings.colorTemperature,
+ mockSettings.colorTemperature.current);
+ assert_equals(settings.iso, mockSettings.iso.current);
+
+ assert_equals(settings.brightness, mockSettings.brightness.current);
+ assert_equals(settings.contrast, mockSettings.contrast.current);
+ assert_equals(settings.saturation, mockSettings.saturation.current);
+ assert_equals(settings.sharpness, mockSettings.sharpness.current);
+
+ assert_equals(settings.focusDistance, mockSettings.focusDistance.current);
+
+ if (ptzPermission === 'granted') {
+ assert_equals(settings.pan, mockSettings.pan.current);
+ assert_equals(settings.tilt, mockSettings.tilt.current);
+ assert_equals(settings.zoom, mockSettings.zoom.current);
+ } else if (ptzPermission === 'denied') {
+ assert_false('pan' in settings);
+ assert_false('tilt' in settings);
+ assert_false('zoom' in settings);
+ }
+
+ assert_equals(settings.torch, mockSettings.torch, 'torch');
+ });
+}
+
+generate_tests(makeImageCaptureTest, [
+ [
+ "exercises MediaStreamTrack.getSettings() with PTZ permission denied",
+ false,
+ ],
+ [
+ "exercises MediaStreamTrack.getSettings() with PTZ permission granted",
+ true,
+ ],
+]);
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/detached-HTMLCanvasElement.html b/testing/web-platform/tests/mediacapture-image/detached-HTMLCanvasElement.html
new file mode 100644
index 0000000000..e27950fc5e
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/detached-HTMLCanvasElement.html
@@ -0,0 +1,26 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<body>
+<script>
+
+async_test(t => {
+ let iframe = document.createElement('iframe');
+ let html = "<canvas id='canvas' width=10 height=10 />";
+ iframe.srcdoc = html;
+ iframe.onload = t.step_func_done(() => {
+ // This detaches the frame while retaining a reference to an
+ // HTMLCanvasElement from it.
+ let canvas = iframe.contentWindow.document.getElementById('canvas');
+ document.body.removeChild(iframe);
+
+ // Creation of the ImageCapture object (as part of the MediaStreamTrack)
+ // should be safe even if the frame is detached.
+ canvas.captureStream();
+ });
+
+ document.body.appendChild(iframe);
+}, 'MediaStreamTrack can be obtained from a detached frame');
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/mediacapture-image/getPhotoCapabilities.html b/testing/web-platform/tests/mediacapture-image/getPhotoCapabilities.html
new file mode 100644
index 0000000000..94962a03f4
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/getPhotoCapabilities.html
@@ -0,0 +1,73 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<body>
+<canvas id='canvas' width=10 height=10/>
+</body>
+<script>
+
+// This test verifies that ImageCapture can call getPhotoCapabilities(), with a
+// mock Mojo interface implementation.
+
+image_capture_test(async (t, imageCaptureTest) => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+
+ let mockCapabilities = imageCaptureTest.mockImageCapture().state();
+ let capturer = new ImageCapture(stream.getVideoTracks()[0]);
+ assert_equals(typeof capturer.getPhotoCapabilities, 'function');
+
+ let capabilities = await capturer.getPhotoCapabilities();
+
+ assert_equals(capabilities.redEyeReduction, 'controllable',
+ 'redEyeReduction');
+
+ assert_equals(capabilities.imageHeight.max, mockCapabilities.height.max);
+ assert_equals(capabilities.imageHeight.min, mockCapabilities.height.min);
+ assert_equals(capabilities.imageHeight.step, mockCapabilities.height.step);
+
+ assert_equals(capabilities.imageWidth.max, mockCapabilities.width.max);
+ assert_equals(capabilities.imageWidth.min, mockCapabilities.width.min);
+ assert_equals(capabilities.imageWidth.step, mockCapabilities.width.step);
+
+ assert_array_equals(capabilities.fillLightMode, [ 'auto', 'flash' ],
+ 'fillLightMode');
+
+}, 'exercises ImageCapture.getPhotoCapabilities()');
+
+promise_test(t => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+ let videoTrack = stream.getVideoTracks()[0];
+ videoTrack.stop();
+
+ let capturer = new ImageCapture(videoTrack);
+ assert_equals(videoTrack.readyState, 'ended');
+
+ return promise_rejects_dom(t, 'InvalidStateError', capturer.getPhotoCapabilities())
+
+}, 'getPhotoCapabilities() of an ended Track should throw "InvalidStateError"');
+
+async_test(t => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+ let videoTrack = stream.getVideoTracks()[0];
+
+ let capturer = new ImageCapture(videoTrack);
+ capturer.getPhotoCapabilities()
+ .then(t.step_func_done(() => assert_unreached('should throw "OperationError"')))
+ .catch(t.step_func_done(e => assert_equals(e.name, 'OperationError')))
+ videoTrack.stop();
+}, 'throw "OperationError" when the MediaStreamTrack is stopped while getting photo capabilities');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/getPhotoSettings.html b/testing/web-platform/tests/mediacapture-image/getPhotoSettings.html
new file mode 100644
index 0000000000..4540373559
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/getPhotoSettings.html
@@ -0,0 +1,61 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<body>
+<canvas id='canvas' width=10 height=10/>
+</body>
+<script>
+
+const fillLightModeNames = ['off', 'auto', 'flash'];
+
+// This test verifies that ImageCapture can call getPhotoSettings(), with a
+// mock Mojo interface implementation.
+image_capture_test(async (t, imageCaptureTest) => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+
+ let mockState = imageCaptureTest.mockImageCapture().state();
+ let capturer = new ImageCapture(stream.getVideoTracks()[0]);
+ let settings = await capturer.getPhotoSettings();
+ assert_equals(settings.imageWidth, mockState.width.current, 'width');
+ assert_equals(settings.imageHeight, mockState.height.current, 'height');
+ // TODO(mcasas): check the remaining two entries https://crbug.com/732521.
+
+}, 'exercises ImageCapture.getPhotoSettings()');
+
+promise_test(t => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+ let videoTrack = stream.getVideoTracks()[0];
+ videoTrack.stop();
+
+ let capturer = new ImageCapture(videoTrack);
+ assert_equals(videoTrack.readyState, 'ended');
+
+ return promise_rejects_dom(t, 'InvalidStateError', capturer.getPhotoSettings())
+
+}, 'getPhotoSettings() of an ended Track should throw "InvalidStateError"');
+
+async_test(t => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+ let videoTrack = stream.getVideoTracks()[0];
+
+ let capturer = new ImageCapture(videoTrack);
+ capturer.getPhotoSettings()
+ .then(t.step_func_done(() => assert_unreached('should throw "OperationError"')))
+ .catch(t.step_func_done(e => assert_equals(e.name, 'OperationError')))
+ videoTrack.stop();
+}, 'throw "OperationError" when the MediaStreamTrack is stopped while getting photo settings');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/getusermedia.https.html b/testing/web-platform/tests/mediacapture-image/getusermedia.https.html
new file mode 100644
index 0000000000..033501cf64
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/getusermedia.https.html
@@ -0,0 +1,26 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>getUserMedia</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+"use strict";
+
+ [
+ { video: { pan: { min: 1 } } },
+ { video: { pan: { max: 1 } } },
+ { video: { pan: { exact: 1 } } },
+ { video: { tilt: { min: 1 } } },
+ { video: { tilt: { max: 1 } } },
+ { video: { tilt: { exact: 1 } } },
+ { video: { zoom: { min: 1 } } },
+ { video: { zoom: { max: 1 } } },
+ { video: { zoom: { exact: 1 } } }
+ ].forEach(constraints =>
+ promise_test(t => {
+ const promise = navigator.mediaDevices.getUserMedia(constraints);
+ return promise_rejects_js(t, TypeError, promise);
+ }, `getUserMedia(${JSON.stringify(constraints)}) must fail with TypeError`)
+ );
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/idlharness.window.js b/testing/web-platform/tests/mediacapture-image/idlharness.window.js
new file mode 100644
index 0000000000..2977138647
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/idlharness.window.js
@@ -0,0 +1,25 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+// META: timeout=long
+
+// https://w3c.github.io/mediacapture-image/
+
+'use strict';
+
+idl_test(
+ ['image-capture'],
+ ['mediacapture-streams', 'html', 'dom'],
+ idl_array => {
+ idl_array.add_objects({
+ ImageCapture : ['capture'],
+ });
+
+ const canvas = document.createElement('canvas');
+ document.body.appendChild(canvas);
+ const context = canvas.getContext("2d");
+ context.fillStyle = "red";
+ context.fillRect(0, 0, 10, 10);
+ const track = canvas.captureStream().getVideoTracks()[0];
+ self.capture = new ImageCapture(track);
+ }
+);
diff --git a/testing/web-platform/tests/mediacapture-image/resources/imagecapture-helpers.js b/testing/web-platform/tests/mediacapture-image/resources/imagecapture-helpers.js
new file mode 100644
index 0000000000..8f142cff41
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/resources/imagecapture-helpers.js
@@ -0,0 +1,55 @@
+'use strict';
+
+// These tests rely on the User Agent providing an implementation of
+// platform image capture backends.
+//
+// In Chromium-based browsers this implementation is provided by a polyfill
+// in order to reduce the amount of test-only code shipped to users. To enable
+// these tests the browser must be run with these options:
+//
+// --enable-blink-features=MojoJS,MojoJSTest
+
+async function loadChromiumResources() {
+ await import('/resources/chromium/mock-imagecapture.js');
+}
+
+async function initialize_image_capture_tests() {
+ if (typeof ImageCaptureTest === 'undefined') {
+ const script = document.createElement('script');
+ script.src = '/resources/test-only-api.js';
+ script.async = false;
+ const p = new Promise((resolve, reject) => {
+ script.onload = () => { resolve(); };
+ script.onerror = e => { reject(e); };
+    });
+ document.head.appendChild(script);
+ await p;
+
+ if (isChromiumBased) {
+ await loadChromiumResources();
+ }
+ }
+ assert_implements(ImageCaptureTest, 'ImageCaptureTest is unavailable');
+ let imageCaptureTest = new ImageCaptureTest();
+ await imageCaptureTest.initialize();
+ return imageCaptureTest;
+}
+
+function image_capture_test(func, name, properties) {
+ promise_test(async (t) => {
+ let imageCaptureTest = await initialize_image_capture_tests();
+ try {
+ await func(t, imageCaptureTest);
+ } finally {
+ await imageCaptureTest.reset();
+    }
+ }, name, properties);
+}
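+
+// Typical usage, mirroring the tests in this directory: the wrapped function
+// receives the testharness.js |t| plus an ImageCaptureTest whose mock state
+// can be inspected (or tweaked) before exercising the real API, e.g.:
+//
+//   image_capture_test(async (t, imageCaptureTest) => {
+//     const mockState = imageCaptureTest.mockImageCapture().state();
+//     const stream = await navigator.mediaDevices.getUserMedia({video: true});
+//     const capabilities = stream.getVideoTracks()[0].getCapabilities();
+//     assert_equals(capabilities.iso.max, mockState.iso.max);
+//   }, 'capabilities match the mock state');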
+
+function assert_point2d_array_approx_equals(actual, expected, epsilon) {
+ assert_equals(actual.length, expected.length, 'length');
+ for (var i = 0; i < actual.length; ++i) {
+ assert_approx_equals(actual[i].x, expected[i].x, epsilon, 'x');
+ assert_approx_equals(actual[i].y, expected[i].y, epsilon, 'y');
+ }
+}
diff --git a/testing/web-platform/tests/mediacapture-image/setOptions-reject.html b/testing/web-platform/tests/mediacapture-image/setOptions-reject.html
new file mode 100644
index 0000000000..4deee97d7b
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/setOptions-reject.html
@@ -0,0 +1,51 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<body>
+<canvas id='canvas' width=10 height=10/>
+</body>
+<script>
+
+let canvas = document.getElementById('canvas');
+let context = canvas.getContext('2d');
+context.fillStyle = 'red';
+context.fillRect(0, 0, 10, 10);
+
+// This test verifies that ImageCapture.takePhoto() rejects if any passed
+// option is unsupported or outside its allowed range.
+function makePromiseTest(getOption) {
+ image_capture_test(async (t, imageCaptureTest) => {
+ imageCaptureTest.mockImageCapture().state().redEyeReduction = 0;
+
+ let stream = canvas.captureStream();
+ let capturer = new ImageCapture(stream.getVideoTracks()[0]);
+ await capturer.getPhotoCapabilities();
+ const options = getOption(imageCaptureTest.mockImageCapture().state());
+
+ try {
+ await capturer.takePhoto(options);
+ assert_unreached('expected takePhoto to reject');
+ } catch (error) {
+ assert_equals(error.name, 'NotSupportedError');
+ }
+ });
+}
+
+const optionsGenerators = [
+ capabilities => ({ redEyeReduction: true }),
+ capabilities => ({ imageHeight: capabilities.height.max + 1 }),
+ capabilities => ({ imageHeight: capabilities.height.min - 1 }),
+ capabilities => ({ imageWidth: capabilities.width.max + 1 }),
+ capabilities => ({ imageWidth: capabilities.width.min - 1 }),
+ capabilities => ({ fillLightMode: 'off' }),
+];
+
+for (const key in optionsGenerators) {
+ generate_tests(
+ makePromiseTest,
+ [[ 'ImageCapture.takePhoto(options) rejects with bad options, #' + key,
+ optionsGenerators[key] ]]);
+}
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/takePhoto-with-PhotoSettings.html b/testing/web-platform/tests/mediacapture-image/takePhoto-with-PhotoSettings.html
new file mode 100644
index 0000000000..5870861245
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/takePhoto-with-PhotoSettings.html
@@ -0,0 +1,63 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<body>
+<canvas id='canvas' width=10 height=10/>
+</body>
+<script>
+
+const fillLightModeNames = ['off', 'auto', 'flash'];
+
+// This test verifies that ImageCapture can call takePhoto with a PhotoSettings
+// argument, with a mock Mojo interface implementation.
+
+image_capture_test(async (t, imageCaptureTest) => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+
+ const optionsDict = { imageWidth : 1080,
+ imageHeight : 100,
+ redEyeReduction : true,
+ fillLightMode : 'flash'
+ };
+
+ let capturer = new ImageCapture(stream.getVideoTracks()[0]);
+ let blob = await capturer.takePhoto(optionsDict);
+
+ // JS Blob is almost-opaque, can only check |type| and |size|.
+ assert_equals(blob.type, 'image/cat');
+ assert_equals(blob.size, 2);
+
+
+ assert_equals(true, imageCaptureTest.mockImageCapture().options().hasWidth,
+ 'hasWidth');
+ assert_equals(optionsDict.imageWidth,
+ imageCaptureTest.mockImageCapture().options().width,'width');
+ assert_equals(true, imageCaptureTest.mockImageCapture().options().hasHeight,
+ 'hasHeight');
+ assert_equals(optionsDict.imageHeight,
+ imageCaptureTest.mockImageCapture().options().height,
+ 'height');
+
+  // Depending on how Mojo packs booleans into integers, this can be a number
+  // instead of a boolean, so compare with == directly.
+ // TODO(mcasas): Revert to assert_equals() when yzshen@ has sorted it out.
+ assert_true(
+ optionsDict.redEyeReduction == imageCaptureTest.mockImageCapture().
+ options().redEyeReduction, 'redEyeReduction');
+
+ assert_equals(true,
+ imageCaptureTest.mockImageCapture().options().hasFillLightMode,
+ 'hasFillLightMode');
+ assert_equals(optionsDict.fillLightMode,
+ fillLightModeNames[
+ imageCaptureTest.mockImageCapture().options().fillLightMode],
+ 'fillLightMode');
+
+}, 'exercises ImageCapture.takePhoto(PhotoSettings dictionary)');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-image/takePhoto-without-PhotoCapabilities.https.window.js b/testing/web-platform/tests/mediacapture-image/takePhoto-without-PhotoCapabilities.https.window.js
new file mode 100644
index 0000000000..96eb253ccd
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/takePhoto-without-PhotoCapabilities.https.window.js
@@ -0,0 +1,6 @@
+promise_test(async t => {
+ const track = new MediaStreamTrackGenerator('video');
+ const capturer = new ImageCapture(track);
+ const settings = await capturer.getPhotoSettings();
+ await promise_rejects_dom(t, 'UnknownError', capturer.takePhoto(settings));
+}, 'exercise takePhoto() on a track without PhotoCapabilities');
diff --git a/testing/web-platform/tests/mediacapture-image/takePhoto.html b/testing/web-platform/tests/mediacapture-image/takePhoto.html
new file mode 100644
index 0000000000..6b27f085a8
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-image/takePhoto.html
@@ -0,0 +1,61 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
+<body>
+<canvas id='canvas' width=10 height=10/>
+</body>
+<script>
+
+// This test verifies that ImageCapture can take photos via takePhoto(), with a
+// mock Mojo interface implementation.
+
+image_capture_test(async t => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+
+ let capturer = new ImageCapture(stream.getVideoTracks()[0]);
+ let blob = await capturer.takePhoto();
+
+ // JS Blob is almost-opaque, can only check |type| and |size|.
+ assert_equals(blob.type, 'image/cat');
+ assert_equals(blob.size, 2);
+
+}, 'exercises ImageCapture.takePhoto()');
+
+image_capture_test(async t => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+
+ let capturer = new ImageCapture(stream.getVideoTracks()[0]);
+ let blob = await capturer.takePhoto(null);
+
+ // JS Blob is almost-opaque, can only check |type| and |size|.
+ assert_equals(blob.type, 'image/cat');
+ assert_equals(blob.size, 2);
+
+}, 'exercises ImageCapture.takePhoto(null)');
+
+promise_test(t => {
+ let canvas = document.getElementById('canvas');
+ let context = canvas.getContext('2d');
+ context.fillStyle = 'red';
+ context.fillRect(0, 0, 10, 10);
+ let stream = canvas.captureStream();
+ let videoTrack = stream.getVideoTracks()[0];
+ videoTrack.stop();
+
+ let capturer = new ImageCapture(videoTrack);
+ assert_equals(videoTrack.readyState, 'ended');
+
+ return promise_rejects_dom(t, 'InvalidStateError', capturer.takePhoto())
+
+}, 'takePhoto() of an ended Track should throw "InvalidStateError"');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-audio.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-audio.https.html
new file mode 100644
index 0000000000..c2f0bfc011
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-audio.https.html
@@ -0,0 +1,97 @@
+<!doctype html>
+<html>
+
+<head>
+ <title>MediaStreamTrackGenerator</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-insertable-streams">
+</head>
+
+<body>
+ <p class="instructions">When prompted, use the accept button to give permission to use your audio and video devices.</p>
+ <h1 class="instructions">Description</h1>
+ <p class="instructions">This test checks that generating audio MediaStreamTracks works as expected.</p>
+ <audio id="audioElement" autoplay=true></audio>
+ <script src=/resources/testharness.js></script>
+ <script src=/resources/testharnessreport.js></script>
+ <script src=/resources/testdriver.js></script>
+ <script src=/resources/testdriver-vendor.js></script>
+ <script src='../mediacapture-streams/permission-helper.js'></script>
+ <script>
+
+ function makeAudioData(timestamp) {
+ const sampleRate = 30000;
+
+ let frames = sampleRate / 10;
+ let channels = 1;
+
+ // Generate a simple sin wave, so we have something.
+ let data = new Float32Array(frames*channels);
+ const hz = 100; // sound frequency
+ for (let i = 0; i < data.length; i++) {
+ const t = (i / sampleRate) * hz * (Math.PI * 2);
+ data[i] = Math.sin(t);
+ }
+
+ return new AudioData({
+ timestamp: timestamp,
+ numberOfFrames: frames,
+ numberOfChannels: channels,
+ sampleRate: sampleRate,
+ data: data,
+ format: "f32",
+ });
+ }
+
+ promise_test(async t => {
+ const generator = new MediaStreamTrackGenerator("audio");
+
+ const writer = generator.writable.getWriter();
+ await writer.write(makeAudioData(1));
+
+ assert_equals(generator.kind, "audio");
+ assert_equals(generator.readyState, "live");
+
+ t.add_cleanup(() => generator.stop());
+  }, "Tests that creating an Audio MediaStreamTrackGenerator works as expected");
+
+ promise_test(async t => {
+ assert_throws_js(TypeError, () => { new MediaStreamTrackGenerator({ kind: "invalid kind" }) });
+ }, "Creating Generator with an invalid kind throws");
+
+ promise_test(async t => {
+ await setMediaPermission();
+ const capturedStream = await navigator.mediaDevices.getUserMedia({ audio: true });
+ assert_equals(capturedStream.getAudioTracks().length, 1);
+ const upstreamTrack = capturedStream.getAudioTracks()[0];
+ t.add_cleanup(() => upstreamTrack.stop());
+
+ assert_throws_js(TypeError, () => { new MediaStreamTrackGenerator() });
+ }, "Creating Generator with a missing kind throws");
+
+ promise_test(async t => {
+ const generator = new MediaStreamTrackGenerator({ kind: "video" });
+ t.add_cleanup(() => generator.stop());
+
+ const writer = generator.writable.getWriter();
+ const data = makeAudioData(1);
+
+ writer.write(data).then(t.step_func(() => assert_unreached("Write should reject")), t.step_func(f => assert_true(f instanceof TypeError, "write rejects with a TypeError")));
+ }, "Mismatched data and generator kind throws on write.");
+
+ promise_test(async t => {
+ const generator = new MediaStreamTrackGenerator("audio");
+ t.add_cleanup(() => generator.stop());
+
+ const audioElement = document.getElementById("audioElement");
+ audioElement.srcObject = new MediaStream([generator]);
+ await audioElement.play();
+
+ const writer = generator.writable.getWriter();
+ await writer.write(makeAudioData(1));
+
+ // Wait for audio playout to actually happen.
+ await t.step_wait(() => audioElement.currentTime > 0, "audioElement played out generated track");
+ }, "Tests that audio actually flows to a connected audio element");
+ </script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-service-worker.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-service-worker.https.html
new file mode 100644
index 0000000000..389a30d0d9
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-service-worker.https.html
@@ -0,0 +1,24 @@
+<!doctype html>
+<title>Test initialize MediaStreamTrackGenerator in a service worker</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src='/service-workers/service-worker/resources/test-helpers.sub.js'></script>
+<script>
+'use strict';
+
+promise_test(async t => {
+ const registration = await navigator.serviceWorker.register('service-worker.js');
+ await wait_for_state(t, registration.installing, 'activated');
+ const result = new Promise((resolve, reject) => {
+ navigator.serviceWorker.addEventListener('message', (e) => {
+ if (e.data.result === 'Failure') {
+ reject('Failed with error ' + e.data.error);
+ } else {
+ resolve();
+ }
+ });
+ });
+ registration.active.postMessage('hello world');
+ return result;
+}, 'A service worker is able to initialize a MediaStreamTrackGenerator without crashing');
+</script>
\ No newline at end of file
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-shared-worker.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-shared-worker.https.html
new file mode 100644
index 0000000000..deecfccad1
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-shared-worker.https.html
@@ -0,0 +1,22 @@
+<!doctype html>
+<title>Test initialize MediaStreamTrackGenerator in a shared worker</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+'use strict';
+
+promise_test(async t => {
+ const worker = new SharedWorker('shared-worker.js');
+ const result = new Promise((resolve, reject) => {
+ worker.port.onmessage = (e) => {
+ if (e.data.result === 'Failure') {
+ reject('Failed with error ' + e.data.error);
+ } else {
+ resolve();
+ }
+ };
+ });
+ worker.port.postMessage('Hello world');
+ return result;
+}, 'A shared worker is able to initialize a MediaStreamTrackGenerator without crashing');
+</script>
\ No newline at end of file
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-worker.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-worker.https.html
new file mode 100644
index 0000000000..e0a8f2fc27
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-in-worker.https.html
@@ -0,0 +1,39 @@
+<!doctype html>
+<title>Test creation of MediaStreamTrackGenerator in a worker</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+'use strict';
+
+function initWorker(){
+ const worker = new Worker('dedicated-worker.js');
+ const result = new Promise((resolve, reject) => {
+ worker.onmessage = (e) => {
+ if (e.data.result === 'Failure') {
+ reject('Failed with error ' + e.data.error);
+ } else {
+ resolve();
+ }
+ };
+ });
+ return [worker,result];
+}
+
+promise_test(async t => {
+ const [worker,result] = initWorker();
+ worker.postMessage({msg: 'Hello there'});
+ return result;
+}, 'A worker is able to initialize a MediaStreamTrackGenerator without crashing');
+
+promise_test(async t => {
+ const [worker,result] = initWorker();
+ worker.postMessage({enable: true});
+ return result;
+}, 'A worker is able to enable a MediaStreamTrackGenerator without crashing');
+
+promise_test(async t => {
+ const [worker,result] = initWorker();
+ worker.postMessage({enable: false});
+ return result;
+}, 'A worker is able to disable a MediaStreamTrackGenerator without crashing');
+</script>
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-pipes-data-in-worker.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-pipes-data-in-worker.https.html
new file mode 100644
index 0000000000..61a6f038c4
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-pipes-data-in-worker.https.html
@@ -0,0 +1,41 @@
+<!doctype html>
+<title>Test piping data through MediaStreamTrackGenerator in a worker</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script id="workerCode" type="javascript/worker">
+self.onmessage = (e) => {
+ try {
+ const generator = new MediaStreamTrackGenerator({kind: 'video'});
+ e.data.readable.pipeTo(generator.writable);
+ self.postMessage({result: 'Success'});
+ } catch (e) {
+ self.postMessage({result: 'Failure', error: e});
+ }
+}
+</script>
+<script>
+'use strict';
+
+promise_test(async t => {
+ const workerBlob = new Blob([document.querySelector('#workerCode').textContent],
+ { type: "text/javascript" });
+ const workerUrl = window.URL.createObjectURL(workerBlob);
+ const worker = new Worker(workerUrl);
+ window.URL.revokeObjectURL(workerUrl);
+ const result = new Promise((resolve, reject) => {
+ worker.onmessage = (e) => {
+ if (e.data.result === 'Failure') {
+ reject('Failed with error ' + e.data.error);
+ } else {
+ resolve();
+ }
+ };
+ });
+ const stream = await navigator.mediaDevices.getUserMedia({ video: true });
+ const track = stream.getVideoTracks()[0];
+ const processor = new MediaStreamTrackProcessor({ track: track });
+ worker.postMessage({ readable: processor.readable },
+ [processor.readable]);
+ return result;
+}, 'A worker is able to pipe data through a MediaStreamTrackGenerator without crashing');
+</script>
\ No newline at end of file
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-video.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-video.https.html
new file mode 100644
index 0000000000..a6f73f009c
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackGenerator-video.https.html
@@ -0,0 +1,285 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>MediaStream Insertable Streams - Video</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/webrtc/RTCPeerConnection-helper.js"></script>
+</head>
+<body>
+ <p class="instructions">When prompted, use the accept button to give permission to use your audio and video devices.</p>
+ <h1 class="instructions">Description</h1>
+ <p class="instructions">This test checks that generating video MediaStreamTracks works as expected.</p>
+ <script>
+
+ const pixelColour = [50, 100, 150, 255];
+ const height = 240;
+ const width = 320;
+ function makeVideoFrame(timestamp) {
+ const canvas = new OffscreenCanvas(width, height);
+
+ const ctx = canvas.getContext('2d', {alpha: false});
+ ctx.fillStyle = `rgba(${pixelColour.join()})`;
+ ctx.fillRect(0, 0, width, height);
+
+ return new VideoFrame(canvas, {timestamp, alpha: 'discard'});
+ }
+
+ async function getVideoFrame() {
+ const stream = await getNoiseStream({video: true});
+ const input_track = stream.getTracks()[0];
+ const processor = new MediaStreamTrackProcessor(input_track);
+ const reader = processor.readable.getReader();
+ const result = await reader.read();
+ input_track.stop();
+ return result.value;
+ }
+
+ function assertPixel(t, bytes, expected, epsilon = 5) {
+ for (let i = 0; i < bytes.length; i++) {
+ t.step(() => {
+ assert_less_than(Math.abs(bytes[i] - expected[i]), epsilon, "Mismatched pixel");
+ });
+ }
+ }
+
+ async function initiateSingleTrackCall(t, track, output) {
+ const caller = new RTCPeerConnection();
+ t.add_cleanup(() => caller.close());
+ const callee = new RTCPeerConnection();
+ t.add_cleanup(() => callee.close());
+ caller.addTrack(track);
+ t.add_cleanup(() => track.stop());
+
+ exchangeIceCandidates(caller, callee);
+ // Wait for the first track.
+ const e = await exchangeOfferAndListenToOntrack(t, caller, callee);
+ output.srcObject = new MediaStream([e.track]);
+ // Exchange answer.
+ await exchangeAnswer(caller, callee);
+ await waitForConnectionStateChange(callee, ['connected']);
+ }
+
+ promise_test(async t => {
+ const videoFrame = await getVideoFrame();
+ const originalWidth = videoFrame.displayWidth;
+ const originalHeight = videoFrame.displayHeight;
+ const originalTimestamp = videoFrame.timestamp;
+ const generator = new MediaStreamTrackGenerator({kind: 'video'});
+ t.add_cleanup(() => generator.stop());
+
+ // Use a MediaStreamTrackProcessor as a sink for |generator| to verify
+    // that |generator| actually forwards the frames written to its writable
+ // field.
+ const processor = new MediaStreamTrackProcessor(generator);
+ const reader = processor.readable.getReader();
+ const readerPromise = new Promise(async resolve => {
+ const result = await reader.read();
+ assert_equals(result.value.displayWidth, originalWidth);
+ assert_equals(result.value.displayHeight, originalHeight);
+ assert_equals(result.value.timestamp, originalTimestamp);
+ resolve();
+ });
+
+ generator.writable.getWriter().write(videoFrame);
+
+ return readerPromise;
+ }, 'Tests that MediaStreamTrackGenerator forwards frames to sink');
+
+ promise_test(async t => {
+ const videoFrame = makeVideoFrame(1);
+ const originalWidth = videoFrame.displayWidth;
+ const originalHeight = videoFrame.displayHeight;
+ const generator = new MediaStreamTrackGenerator({ kind: 'video' });
+ t.add_cleanup(() => generator.stop());
+
+ const video = document.createElement("video");
+ video.autoplay = true;
+ video.width = 320;
+ video.height = 240;
+ video.srcObject = new MediaStream([generator]);
+ video.play();
+
+ // Wait for the video element to be connected to the generator and
+ // generate the frame.
+ video.onloadstart = () => generator.writable.getWriter().write(videoFrame);
+
+ return new Promise((resolve)=> {
+ video.ontimeupdate = t.step_func(() => {
+ const canvas = document.createElement("canvas");
+ canvas.width = originalWidth;
+ canvas.height = originalHeight;
+ const context = canvas.getContext('2d');
+ context.drawImage(video, 0, 0);
+ // Pick a pixel in the centre of the video and check that it has the colour of the frame provided.
+ const pixel = context.getImageData(videoFrame.displayWidth/2, videoFrame.displayHeight/2, 1, 1);
+ assertPixel(t, pixel.data, pixelColour);
+ resolve();
+ });
+ });
+ }, 'Tests that frames are actually rendered correctly in a stream used for a video element.');
+
+ promise_test(async t => {
+ const generator = new MediaStreamTrackGenerator({kind: 'video'});
+ t.add_cleanup(() => generator.stop());
+
+ // Write frames for the duration of the test.
+ const writer = generator.writable.getWriter();
+ let timestamp = 0;
+ const intervalId = setInterval(
+ t.step_func(async () => {
+ if (generator.readyState === 'live') {
+ timestamp++;
+ await writer.write(makeVideoFrame(timestamp));
+ }
+ }),
+ 40);
+ t.add_cleanup(() => clearInterval(intervalId));
+
+ const video = document.createElement('video');
+ video.autoplay = true;
+ video.width = width;
+ video.height = height;
+ video.muted = true;
+
+ await initiateSingleTrackCall(t, generator, video);
+
+ return new Promise(resolve => {
+ video.ontimeupdate = t.step_func(() => {
+ const canvas = document.createElement('canvas');
+ canvas.width = width;
+ canvas.height = height;
+ const context = canvas.getContext('2d');
+ context.drawImage(video, 0, 0);
+ // Pick a pixel in the centre of the video and check that it has the
+ // colour of the frame provided.
+ const pixel = context.getImageData(width / 2, height / 2, 1, 1);
+        // Encoding/decoding can add noise, so increase the threshold to 8.
+ assertPixel(t, pixel.data, pixelColour, 8);
+ resolve();
+ });
+ });
+ }, 'Tests that frames are actually rendered correctly in a stream sent over a peer connection.');
+
+ promise_test(async t => {
+ const generator = new MediaStreamTrackGenerator({kind: 'video'});
+ t.add_cleanup(() => generator.stop());
+
+ const inputCanvas = new OffscreenCanvas(width, height);
+
+ const inputContext = inputCanvas.getContext('2d', {alpha: false});
+ // draw four quadrants
+ const colorUL = [255, 0, 0, 255];
+ inputContext.fillStyle = `rgba(${colorUL.join()})`;
+ inputContext.fillRect(0, 0, width / 2, height / 2);
+ const colorUR = [255, 255, 0, 255];
+ inputContext.fillStyle = `rgba(${colorUR.join()})`;
+ inputContext.fillRect(width / 2, 0, width / 2, height / 2);
+ const colorLL = [0, 255, 0, 255];
+ inputContext.fillStyle = `rgba(${colorLL.join()})`;
+ inputContext.fillRect(0, height / 2, width / 2, height / 2);
+ const colorLR = [0, 255, 255, 255];
+ inputContext.fillStyle = `rgba(${colorLR.join()})`;
+ inputContext.fillRect(width / 2, height / 2, width / 2, height / 2);
+
+ // Write frames for the duration of the test.
+ const writer = generator.writable.getWriter();
+ let timestamp = 0;
+ const intervalId = setInterval(
+ t.step_func(async () => {
+ if (generator.readyState === 'live') {
+ timestamp++;
+ await writer.write(new VideoFrame(
+ inputCanvas, {timestamp: timestamp, alpha: 'discard'}));
+ }
+ }),
+ 40);
+ t.add_cleanup(() => clearInterval(intervalId));
+
+ const caller = new RTCPeerConnection();
+ t.add_cleanup(() => caller.close());
+ const callee = new RTCPeerConnection();
+ t.add_cleanup(() => callee.close());
+ const sender = caller.addTrack(generator);
+
+ exchangeIceCandidates(caller, callee);
+ // Wait for the first track.
+ const e = await exchangeOfferAndListenToOntrack(t, caller, callee);
+
+ // Exchange answer.
+ await exchangeAnswer(caller, callee);
+ await waitForConnectionStateChange(callee, ['connected']);
+ const params = sender.getParameters();
+ params.encodings.forEach(e => e.scaleResolutionDownBy = 2);
+ sender.setParameters(params);
+
+ const processor = new MediaStreamTrackProcessor(e.track);
+ const reader = processor.readable.getReader();
+
+ // The first frame may not have had scaleResolutionDownBy applied
+ const numTries = 5;
+ for (let i = 1; i <= numTries; i++) {
+ const {value: outputFrame} = await reader.read();
+ if (outputFrame.displayWidth !== width / 2) {
+ assert_less_than(i, numTries, `First ${numTries} frames were the wrong size.`);
+ outputFrame.close();
+ continue;
+ }
+
+ assert_equals(outputFrame.displayWidth, width / 2);
+ assert_equals(outputFrame.displayHeight, height / 2);
+
+ const outputCanvas = new OffscreenCanvas(width / 2, height / 2);
+ const outputContext = outputCanvas.getContext('2d', {alpha: false});
+ outputContext.drawImage(outputFrame, 0, 0);
+ outputFrame.close();
+ // Check the four quadrants
+ const pixelUL = outputContext.getImageData(width / 8, height / 8, 1, 1);
+ assertPixel(t, pixelUL.data, colorUL);
+ const pixelUR =
+ outputContext.getImageData(width * 3 / 8, height / 8, 1, 1);
+ assertPixel(t, pixelUR.data, colorUR);
+ const pixelLL =
+ outputContext.getImageData(width / 8, height * 3 / 8, 1, 1);
+ assertPixel(t, pixelLL.data, colorLL);
+ const pixelLR =
+ outputContext.getImageData(width * 3 / 8, height * 3 / 8, 1, 1);
+ assertPixel(t, pixelLR.data, colorLR);
+ break;
+ }
+ }, 'Tests that frames are sent correctly with RTCRtpEncodingParameters.scaleResolutionDownBy.');
+
+ promise_test(async t => {
+ const generator = new MediaStreamTrackGenerator("video");
+ t.add_cleanup(() => generator.stop());
+
+ const writer = generator.writable.getWriter();
+ const frame = makeVideoFrame(1);
+ await writer.write(frame);
+
+ assert_equals(generator.kind, "video");
+ assert_equals(generator.readyState, "live");
+ }, "Tests that creating a Video MediaStreamTrackGenerator works as expected");
+
+ promise_test(async t => {
+ const generator = new MediaStreamTrackGenerator("video");
+ t.add_cleanup(() => generator.stop());
+
+ const writer = generator.writable.getWriter();
+ const frame = makeVideoFrame(1);
+ await writer.write(frame);
+
+ assert_throws_dom("InvalidStateError", () => frame.clone(), "VideoFrame wasn't destroyed on write.");
+ }, "Tests that VideoFrames are destroyed on write.");
+
+ promise_test(async t => {
+ const generator = new MediaStreamTrackGenerator("audio");
+ t.add_cleanup(() => generator.stop());
+
+ const writer = generator.writable.getWriter();
+ const frame = makeVideoFrame(1);
+    await promise_rejects_js(t, TypeError, writer.write(frame));
+ }, "Mismatched frame and generator kind throws on write.");
+ </script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-audio.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-audio.https.html
new file mode 100644
index 0000000000..449b4833a2
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-audio.https.html
@@ -0,0 +1,54 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaStreamTrackProcessor</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-insertable-streams">
+</head>
+<body>
+<p class="instructions">When prompted, use the accept button to give permission to use your audio and video devices.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that MediaStreamTrackProcessor works as expected on audio MediaStreamTracks.</p>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src='../mediacapture-streams/permission-helper.js'></script>
+<script>
+promise_test(async t => {
+ await setMediaPermission("granted", ["microphone"]);
+ const stream = await navigator.mediaDevices.getUserMedia({audio: true});
+ const track = stream.getTracks()[0];
+ const processor = new MediaStreamTrackProcessor({track: track});
+ const reader = processor.readable.getReader();
+ const readResult = await reader.read();
+ assert_false(readResult.done)
+ assert_true(readResult.value instanceof AudioData);
+ readResult.value.close();
+ track.stop();
+ return reader.closed;
+}, "Tests that the reader of an audio MediaStreamTrackProcessor produces AudioData objects and is closed on track stop");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({audio: true});
+ const track = stream.getTracks()[0];
+ const processor = new MediaStreamTrackProcessor({track: track});
+ const worker = new Worker('MediaStreamTrackProcessor-worker.js');
+ const promise = new Promise(resolve => {
+ worker.onmessage = t.step_func(msg => {
+ if (msg.data instanceof AudioData) {
+ msg.data.close();
+ track.stop();
+ } else if (msg.data == 'closed') {
+ resolve();
+ } else {
+ assert_unreached();
+ }
+ })
+ });
+ worker.postMessage({readable: processor.readable},
+ [processor.readable]);
+ return promise;
+}, "Tests that the reader of an audio MediaStreamTrackProcessor produces AudioData objects and is closed on track stop while running on a worker");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-backpressure.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-backpressure.https.html
new file mode 100644
index 0000000000..7b4f88e944
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-backpressure.https.html
@@ -0,0 +1,69 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaStreamTrackProcessor backpressure</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-insertable-streams">
+</head>
+<body>
+ <h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that MediaStreamTrackProcessor handles backpressure from a WHATWG stream pipeline.</p>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script>
+
+ const height = 240;
+ const width = 320;
+
+ const inputCanvas = new OffscreenCanvas(width, height);
+ const inputCtx = inputCanvas.getContext('2d', {alpha: false});
+ inputCtx.fillStyle = 'black';
+ inputCtx.fillRect(0, 0, width, height);
+
+ const frameDuration = 40;
+
+ function makeUniformVideoFrame(timestamp) {
+ return new VideoFrame(inputCanvas, {timestamp, alpha: 'discard'});
+ }
+
+ promise_test(async t => {
+ // TODO: use "new VideoTrackGenerator"
+ const generator = new MediaStreamTrackGenerator({kind: 'video'});
+ t.add_cleanup(() => generator.stop());
+
+ // Write frames for the duration of the test.
+ const writer = generator.writable.getWriter();
+ let timestamp = 0;
+ const intervalId = setInterval(
+ t.step_func(async () => {
+ if (generator.readyState === 'live') {
+ timestamp++;
+ await writer.write(makeUniformVideoFrame(timestamp));
+ }
+ }),
+ frameDuration);
+ t.add_cleanup(() => clearInterval(intervalId));
+ t.step_timeout(function() {
+ clearInterval(intervalId);
+ generator.stop();
+ }, 2000);
+ const processor = new MediaStreamTrackProcessor({track: generator});
+ let ts = 1;
+ await processor.readable.pipeTo(new WritableStream({
+ async write(frame) {
+ if (ts === 1) {
+ assert_equals(frame.timestamp, ts, "Timestamp mismatch");
+ } else {
+ assert_greater_than_equal(frame.timestamp, ts, "Backpressure should have resulted in skipping at least 3 frames");
+ }
+ frame.close();
+ ts+=3;
+ // Wait the equivalent of 3 frames
+ return new Promise((res) => t.step_timeout(res, 3*frameDuration));
+ }
+ }));
+ }, "Tests that backpressure forces MediaStreamTrackProcess to skip frames");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-video.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-video.https.html
new file mode 100644
index 0000000000..4182505ada
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-video.https.html
@@ -0,0 +1,97 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaStreamTrackProcessor</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-insertable-streams">
+</head>
+<body>
+<p class="instructions">When prompted, use the accept button to give permission to use your audio and video devices.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that MediaStreamTrackProcessor works as expected on video MediaStreamTracks.</p>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src='../mediacapture-streams/permission-helper.js'></script>
+<script>
+promise_test(async t => {
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ const track = stream.getTracks()[0];
+ const processor = new MediaStreamTrackProcessor({track: track});
+ const reader = processor.readable.getReader();
+ const readResult = await reader.read();
+ assert_false(readResult.done);
+ assert_true(readResult.value instanceof VideoFrame);
+ readResult.value.close();
+ track.stop();
+ return reader.closed;
+}, "Tests that the reader of a video MediaStreamTrackProcessor produces video frames and is closed on track stop");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ const track = stream.getTracks()[0];
+ const processor = new MediaStreamTrackProcessor({track: track});
+ const worker = new Worker('MediaStreamTrackProcessor-worker.js');
+ const promise = new Promise(resolve => {
+ worker.onmessage = t.step_func(msg => {
+ if (msg.data instanceof VideoFrame) {
+ msg.data.close();
+ track.stop();
+ } else if (msg.data == 'closed') {
+ resolve();
+ } else {
+ assert_unreached();
+ }
+ })
+ });
+ worker.postMessage({readable: processor.readable},
+ [processor.readable]);
+ return promise;
+}, "Tests that the reader of a video MediaStreamTrackProcessor produces VideoFrame objects and is closed on track stop while running on a worker");
+
+function makeVideoFrame(timestamp) {
+ const canvas = new OffscreenCanvas(100, 100);
+ const ctx = canvas.getContext('2d');
+ return new VideoFrame(canvas, {timestamp});
+}
+
+promise_test(async t => {
+ // The generator will be used as the source for the processor to
+ // produce frames in a controlled manner.
+ const generator = new MediaStreamTrackGenerator('video');
+ t.add_cleanup(() => generator.stop());
+ // Use a larger maxBufferSize than the default to ensure no frames
+ // will be dropped.
+ const processor = new MediaStreamTrackProcessor({track: generator, maxBufferSize:10});
+ const reader = processor.readable.getReader();
+ const writer = generator.writable.getWriter();
+
+ let numReads = 0;
+ let resolve = null;
+ const promise = new Promise(r => resolve = r);
+
+ const numOperations = 4;
+ // Issue reads without waiting for the frames to arrive.
+ for (let i = 0; i < numOperations; i++) {
+ reader.read().then(dv=> {
+ dv.value.close();
+ if (++numReads == numOperations)
+ resolve();
+ });
+ }
+
+ // Write video frames in different tasks to "slowly" settle the pending read
+ // requests.
+ for (let i = 0; i<numOperations; i++) {
+ await writer.write(makeVideoFrame(i));
+ await new Promise(r=>setTimeout(r,0));
+ }
+
+ return promise;
+
+}, "Tests that multiple read requests are eventually settled");
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-worker.js b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-worker.js
new file mode 100644
index 0000000000..51eaef80a9
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/MediaStreamTrackProcessor-worker.js
@@ -0,0 +1,17 @@
+onmessage = async msg => {
+ const reader = msg.data.readable.getReader();
+ let readResult = await reader.read();
+ postMessage(readResult.value);
+ readResult.value.close();
+ // Continue reading until the stream is done due to a track.stop()
+ while (true) {
+ readResult = await reader.read();
+ if (readResult.done) {
+ break;
+ } else {
+ readResult.value.close();
+ }
+ }
+ await reader.closed;
+ postMessage('closed');
+}
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/VideoTrackGenerator.https.html b/testing/web-platform/tests/mediacapture-insertable-streams/VideoTrackGenerator.https.html
new file mode 100644
index 0000000000..2c81c7604a
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/VideoTrackGenerator.https.html
@@ -0,0 +1,327 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>MediaStream Insertable Streams - VideoTrackGenerator</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="../webrtc/RTCPeerConnection-helper.js"></script>
+</head>
+<body>
+ <p class="instructions">If prompted, use the accept button to give permission to use your audio and video devices.</p>
+ <h1 class="instructions">Description</h1>
+ <p class="instructions">This test checks that generating video MediaStreamTracks from VideoTrackGenerator works as expected.</p>
+ <script>
+
+ const pixelColour = [50, 100, 150, 255];
+ const height = 240;
+ const width = 320;
+ function makeVideoFrame(timestamp) {
+ const canvas = new OffscreenCanvas(width, height);
+
+ const ctx = canvas.getContext('2d', {alpha: false});
+ ctx.fillStyle = `rgba(${pixelColour.join()})`;
+ ctx.fillRect(0, 0, width, height);
+
+ return new VideoFrame(canvas, {timestamp, alpha: 'discard'});
+ }
+
+ async function getVideoFrame() {
+ const stream = await getNoiseStream({video: true});
+ const input_track = stream.getTracks()[0];
+ const processor = new MediaStreamTrackProcessor({track: input_track});
+ const reader = processor.readable.getReader();
+ const result = await reader.read();
+ input_track.stop();
+ return result.value;
+ }
+
+ function assertPixel(t, bytes, expected, epsilon = 5) {
+ for (let i = 0; i < bytes.length; i++) {
+ t.step(() => {
+ assert_less_than(Math.abs(bytes[i] - expected[i]), epsilon, "Mismatched pixel");
+ });
+ }
+ }
+
+ async function initiateSingleTrackCall(t, track, output) {
+ const caller = new RTCPeerConnection();
+ t.add_cleanup(() => caller.close());
+ const callee = new RTCPeerConnection();
+ t.add_cleanup(() => callee.close());
+ caller.addTrack(track);
+ t.add_cleanup(() => track.stop());
+
+ exchangeIceCandidates(caller, callee);
+ // Wait for the first track.
+ const e = await exchangeOfferAndListenToOntrack(t, caller, callee);
+ output.srcObject = new MediaStream([e.track]);
+ // Exchange answer.
+ await exchangeAnswer(caller, callee);
+ await waitForConnectionStateChange(callee, ['connected']);
+ }
+
+ promise_test(async t => {
+ const videoFrame = await getVideoFrame();
+ const originalWidth = videoFrame.displayWidth;
+ const originalHeight = videoFrame.displayHeight;
+ const originalTimestamp = videoFrame.timestamp;
+ const generator = new VideoTrackGenerator();
+ t.add_cleanup(() => generator.track.stop());
+
+ // Use a MediaStreamTrackProcessor as a sink for |generator| to verify
+ // that |generator| actually forwards the frames written to its writable
+ // field.
+ const processor = new MediaStreamTrackProcessor({track: generator.track});
+ const reader = processor.readable.getReader();
+ const readerPromise = (async () => {
+ const result = await reader.read();
+ assert_equals(result.value.displayWidth, originalWidth);
+ assert_equals(result.value.displayHeight, originalHeight);
+ assert_equals(result.value.timestamp, originalTimestamp);
+ })();
+
+ generator.writable.getWriter().write(videoFrame);
+ return readerPromise;
+ }, 'Tests that VideoTrackGenerator forwards frames to sink');
+
+ promise_test(async t => {
+ const videoFrame = makeVideoFrame(1);
+ const originalWidth = videoFrame.displayWidth;
+ const originalHeight = videoFrame.displayHeight;
+ const generator = new VideoTrackGenerator();
+ t.add_cleanup(() => generator.track.stop());
+
+ const video = document.createElement("video");
+ video.autoplay = true;
+ video.width = 320;
+ video.height = 240;
+ video.srcObject = new MediaStream([generator.track]);
+ video.play();
+
+ // Wait for the video element to be connected to the generator and
+ // generate the frame.
+ video.onloadstart = () => generator.writable.getWriter().write(videoFrame);
+
+ return new Promise((resolve)=> {
+ video.ontimeupdate = t.step_func(() => {
+ const canvas = document.createElement("canvas");
+ canvas.width = originalWidth;
+ canvas.height = originalHeight;
+ const context = canvas.getContext('2d');
+ context.drawImage(video, 0, 0);
+ // Pick a pixel in the centre of the video and check that it has the colour of the frame provided.
+ const pixel = context.getImageData(videoFrame.displayWidth/2, videoFrame.displayHeight/2, 1, 1);
+ assertPixel(t, pixel.data, pixelColour);
+ resolve();
+ });
+ });
+ }, 'Tests that frames are actually rendered correctly in a stream used for a video element.');
+
+ promise_test(async t => {
+ const generator = new VideoTrackGenerator();
+ t.add_cleanup(() => generator.track.stop());
+
+ // Write frames for the duration of the test.
+ const writer = generator.writable.getWriter();
+ let timestamp = 0;
+ const intervalId = setInterval(
+ t.step_func(async () => {
+ if (generator.track.readyState === 'live') {
+ timestamp++;
+ await writer.write(makeVideoFrame(timestamp));
+ }
+ }),
+ 40);
+ t.add_cleanup(() => clearInterval(intervalId));
+
+ const video = document.createElement('video');
+ video.autoplay = true;
+ video.width = width;
+ video.height = height;
+ video.muted = true;
+
+ await initiateSingleTrackCall(t, generator.track, video);
+
+ return new Promise(resolve => {
+ video.ontimeupdate = t.step_func(() => {
+ const canvas = document.createElement('canvas');
+ canvas.width = width;
+ canvas.height = height;
+ const context = canvas.getContext('2d');
+ context.drawImage(video, 0, 0);
+ // Pick a pixel in the centre of the video and check that it has the
+ // colour of the frame provided.
+ const pixel = context.getImageData(width / 2, height / 2, 1, 1);
+ // Encoding/decoding can add noise, so increase the threshold to 8.
+ assertPixel(t, pixel.data, pixelColour, 8);
+ resolve();
+ });
+ });
+ }, 'Tests that frames are actually rendered correctly in a stream sent over a peer connection.');
+
+
+ promise_test(async t => {
+ const generator = new VideoTrackGenerator();
+ t.add_cleanup(() => generator.track.stop());
+
+ const inputCanvas = new OffscreenCanvas(width, height);
+
+ const inputContext = inputCanvas.getContext('2d', {alpha: false});
+ // draw four quadrants
+ const colorUL = [255, 0, 0, 255];
+ inputContext.fillStyle = `rgba(${colorUL.join()})`;
+ inputContext.fillRect(0, 0, width / 2, height / 2);
+ const colorUR = [255, 255, 0, 255];
+ inputContext.fillStyle = `rgba(${colorUR.join()})`;
+ inputContext.fillRect(width / 2, 0, width / 2, height / 2);
+ const colorLL = [0, 255, 0, 255];
+ inputContext.fillStyle = `rgba(${colorLL.join()})`;
+ inputContext.fillRect(0, height / 2, width / 2, height / 2);
+ const colorLR = [0, 255, 255, 255];
+ inputContext.fillStyle = `rgba(${colorLR.join()})`;
+ inputContext.fillRect(width / 2, height / 2, width / 2, height / 2);
+
+ // Write frames for the duration of the test.
+ const writer = generator.writable.getWriter();
+ let timestamp = 0;
+ const intervalId = setInterval(
+ t.step_func(async () => {
+ if (generator.track.readyState === 'live') {
+ timestamp++;
+ await writer.write(new VideoFrame(
+ inputCanvas, {timestamp: timestamp, alpha: 'discard'}));
+ }
+ }),
+ 40);
+ t.add_cleanup(() => clearInterval(intervalId));
+
+ const caller = new RTCPeerConnection();
+ t.add_cleanup(() => caller.close());
+ const callee = new RTCPeerConnection();
+ t.add_cleanup(() => callee.close());
+ const sender = caller.addTrack(generator.track);
+
+ exchangeIceCandidates(caller, callee);
+ // Wait for the first track.
+ const e = await exchangeOfferAndListenToOntrack(t, caller, callee);
+
+ // Exchange answer.
+ await exchangeAnswer(caller, callee);
+ await waitForConnectionStateChange(callee, ['connected']);
+ const params = sender.getParameters();
+ params.encodings.forEach(e => e.scaleResolutionDownBy = 2);
+ sender.setParameters(params);
+
+ const processor = new MediaStreamTrackProcessor({track: e.track});
+ const reader = processor.readable.getReader();
+
+ // The first frame may not have had scaleResolutionDownBy applied
+ const numTries = 5;
+ for (let i = 1; i <= numTries; i++) {
+ const {value: outputFrame} = await reader.read();
+ if (outputFrame.displayWidth !== width / 2) {
+ assert_less_than(i, numTries, `First ${numTries} frames were the wrong size.`);
+ outputFrame.close();
+ continue;
+ }
+
+ assert_equals(outputFrame.displayWidth, width / 2);
+ assert_equals(outputFrame.displayHeight, height / 2);
+
+ const outputCanvas = new OffscreenCanvas(width / 2, height / 2);
+ const outputContext = outputCanvas.getContext('2d', {alpha: false});
+ outputContext.drawImage(outputFrame, 0, 0);
+ outputFrame.close();
+ // Check the four quadrants
+ const pixelUL = outputContext.getImageData(width / 8, height / 8, 1, 1);
+ assertPixel(t, pixelUL.data, colorUL);
+ const pixelUR =
+ outputContext.getImageData(width * 3 / 8, height / 8, 1, 1);
+ assertPixel(t, pixelUR.data, colorUR);
+ const pixelLL =
+ outputContext.getImageData(width / 8, height * 3 / 8, 1, 1);
+ assertPixel(t, pixelLL.data, colorLL);
+ const pixelLR =
+ outputContext.getImageData(width * 3 / 8, height * 3 / 8, 1, 1);
+ assertPixel(t, pixelLR.data, colorLR);
+ break;
+ }
+ }, 'Tests that frames are sent correctly with RTCRtpEncodingParameters.scaleResolutionDownBy.');
+
+ promise_test(async t => {
+ const generator = new VideoTrackGenerator();
+ t.add_cleanup(() => generator.track.stop());
+
+ const writer = generator.writable.getWriter();
+ const frame = makeVideoFrame(1);
+ await writer.write(frame);
+
+ assert_equals(generator.track.kind, "video");
+ assert_equals(generator.track.readyState, "live");
+ }, "Tests that creating a VideoTrackGenerator works as expected");
+
+ promise_test(async t => {
+ const generator = new VideoTrackGenerator();
+ t.add_cleanup(() => generator.track.stop());
+
+ const writer = generator.writable.getWriter();
+ const frame = makeVideoFrame(1);
+ await writer.write(frame);
+
+ assert_throws_dom("InvalidStateError", () => frame.clone(), "VideoFrame wasn't destroyed on write.");
+ }, "Tests that VideoFrames are destroyed on write.");
+
+ promise_test(async t => {
+ const generator = new VideoTrackGenerator();
+ t.add_cleanup(() => generator.track.stop());
+
+ const writer = generator.writable.getWriter();
+ const frame = makeVideoFrame(1);
+ assert_throws_js(TypeError, writer.write(frame));
+ }, "Mismatched frame and generator kind throws on write.");
+
+ promise_test(async t => {
+ const generator = new VideoTrackGenerator();
+ t.add_cleanup(() => generator.track.stop());
+
+ // Use a MediaStreamTrackProcessor as a sink for |generator| to verify
+ // that |generator| actually forwards the frames written to its writable
+ // field.
+ const processor = new MediaStreamTrackProcessor({track: generator.track});
+ const reader = processor.readable.getReader();
+ const videoFrame = makeVideoFrame(1);
+
+ const writer = generator.writable.getWriter();
+ const videoFrame1 = makeVideoFrame(1);
+ writer.write(videoFrame1);
+ const result1 = await reader.read();
+ assert_equals(result1.value.timestamp, 1);
+ generator.muted = true;
+
+ // This frame is expected to be discarded.
+ const videoFrame2 = makeVideoFrame(2);
+ writer.write(videoFrame2);
+ generator.muted = false;
+
+ const videoFrame3 = makeVideoFrame(3);
+ writer.write(videoFrame3);
+ const result3 = await reader.read();
+ assert_equals(result3.value.timestamp, 3);
+
+ // Set up a read ahead of time, then mute, enqueue and unmute.
+ const promise5 = reader.read();
+ generator.muted = true;
+ writer.write(makeVideoFrame(4)); // Expected to be discarded.
+ generator.muted = false;
+ writer.write(makeVideoFrame(5));
+ const result5 = await promise5;
+ assert_equals(result5.value.timestamp, 5);
+ }, 'Tests that VideoTrackGenerator forwards frames only when unmuted');
+
+ // Note - tests for mute/unmute events will be added once
+ // https://github.com/w3c/mediacapture-transform/issues/81 is resolved
+
+ </script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/dedicated-worker.js b/testing/web-platform/tests/mediacapture-insertable-streams/dedicated-worker.js
new file mode 100644
index 0000000000..0dbcc32d0b
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/dedicated-worker.js
@@ -0,0 +1,11 @@
+self.onmessage = (e) => {
+ try {
+ const mstg = new MediaStreamTrackGenerator({kind: 'video'});
+ if ('enable' in e.data) {
+ mstg.enabled = e.data.enable;
+ }
+ self.postMessage({result: 'Success'});
+ } catch (err) {
+ self.postMessage({result: 'Failure', error: err});
+ }
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/service-worker.js b/testing/web-platform/tests/mediacapture-insertable-streams/service-worker.js
new file mode 100644
index 0000000000..05a8b99ad8
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/service-worker.js
@@ -0,0 +1,8 @@
+self.addEventListener('message', (event) => {
+ try {
+ const mstg = new MediaStreamTrackGenerator({ kind: 'video' });
+ event.source.postMessage({ result: 'Success' });
+ } catch (e) {
+ event.source.postMessage({ result: 'Failure', error: e });
+ }
+}); \ No newline at end of file
diff --git a/testing/web-platform/tests/mediacapture-insertable-streams/shared-worker.js b/testing/web-platform/tests/mediacapture-insertable-streams/shared-worker.js
new file mode 100644
index 0000000000..61ff67bcff
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-insertable-streams/shared-worker.js
@@ -0,0 +1,11 @@
+onconnect = (e) => {
+ const port = e.ports[0];
+ port.onmessage = (e) => {
+ try {
+ const generator = new MediaStreamTrackGenerator({kind: 'video'});
+ port.postMessage({result: 'Success'});
+ } catch (e) {
+ port.postMessage({result: 'Failure', error: e});
+ }
+ }
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/mediacapture-record/BlobEvent-constructor.html b/testing/web-platform/tests/mediacapture-record/BlobEvent-constructor.html
new file mode 100644
index 0000000000..66dc3404d7
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/BlobEvent-constructor.html
@@ -0,0 +1,38 @@
+<!doctype html>
+<title>BlobEvent constructor</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#blob-event">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+test(function() {
+ assert_equals(BlobEvent.length, 2);
+ assert_throws_js(TypeError, function() {
+ new BlobEvent("type");
+ });
+ assert_throws_js(TypeError, function() {
+ new BlobEvent("type", null);
+ });
+ assert_throws_js(TypeError, function() {
+ new BlobEvent("type", undefined);
+ });
+}, "The BlobEventInit dictionary is required");
+
+test(function() {
+ assert_throws_js(TypeError, function() {
+ new BlobEvent("type", {});
+ });
+ assert_throws_js(TypeError, function() {
+ new BlobEvent("type", { data: null });
+ });
+ assert_throws_js(TypeError, function() {
+ new BlobEvent("type", { data: undefined });
+ });
+}, "The BlobEventInit dictionary's data member is required.");
+
+test(function() {
+ var blob = new Blob();
+ var event = new BlobEvent("type", { data: blob });
+ assert_equals(event.type, "type");
+ assert_equals(event.data, blob);
+}, "The BlobEvent instance's data attribute is set.");
+</script>
diff --git a/testing/web-platform/tests/mediacapture-record/META.yml b/testing/web-platform/tests/mediacapture-record/META.yml
new file mode 100644
index 0000000000..d59e5e3084
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/META.yml
@@ -0,0 +1,3 @@
+spec: https://w3c.github.io/mediacapture-record/
+suggested_reviewers:
+ - yellowdoge
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-bitrate.https.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-bitrate.https.html
new file mode 100644
index 0000000000..d89f739033
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-bitrate.https.html
@@ -0,0 +1,230 @@
+<!doctype html>
+<html>
+<head>
+<title>MediaRecorder {audio|video}bitsPerSecond attributes</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="../mediacapture-streams/permission-helper.js"></script>
+</head>
+<script>
+
+/*
+ * The bitrate handling is difficult to test, given that the spec uses text such
+ * as: "values the User Agent deems reasonable" and "such that the sum of
+ * videoBitsPerSecond and audioBitsPerSecond is close to the value of recorder’s
+ * [[ConstrainedBitsPerSecond]] slot". For cases like that this test tries to
+ * use values that are reasonable for the tested track types. Should a UA vendor
+ * see a need to update this to fit their definition of reasonable, they should
+ * feel free to do so, doing their best to avoid regressing existing compliant
+ * implementations.
+ */
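+
+// A minimal, hypothetical sketch (not used by the tests below) of one way a
+// user agent could apportion a total bitsPerSecond budget between an audio
+// and a video track. The 1:9 split is an assumption made purely for
+// illustration; the spec only asks that the two per-track targets sum to
+// something close to the requested total, which is what the tests below
+// check via BITRATE_EPSILON.
+function exampleSplitBitsPerSecond(total) {
+ // Assumed allocation: roughly 10% of the budget to audio, the rest to video.
+ const audioBitsPerSecond = Math.round(total / 10);
+ const videoBitsPerSecond = total - audioBitsPerSecond;
+ return {audioBitsPerSecond, videoBitsPerSecond};
+}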
+
+async function getStream(t, constraints) {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia(constraints);
+ const tracks = stream.getTracks();
+ t.add_cleanup(() => tracks.forEach(tr => tr.stop()));
+ return stream;
+}
+
+function getAudioStream(t) {
+ return getStream(t, {audio: true});
+}
+
+function getVideoStream(t) {
+ return getStream(t, {video: true});
+}
+
+function getAudioVideoStream(t) {
+ return getStream(t, {audio: true, video: true});
+}
+
+const AUDIO_BITRATE = 1e5; // 100kbps
+const VIDEO_BITRATE = 1e6; // 1Mbps
+const LOW_TOTAL_BITRATE = 5e5; // 500kbps
+const HIGH_TOTAL_BITRATE = 2e6; // 2Mbps
+const BITRATE_EPSILON = 1e5; // 100kbps
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t));
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+}, "Passing no bitrate config results in defaults");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ bitsPerSecond: 0,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+ assert_approx_equals(rec.audioBitsPerSecond + rec.videoBitsPerSecond, 0,
+ BITRATE_EPSILON);
+}, "Passing bitsPerSecond:0 results in targets close to 0");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ audioBitsPerSecond: 0,
+ });
+ assert_equals(rec.audioBitsPerSecond, 0);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+}, "Passing only audioBitsPerSecond:0 results in 0 for audio, default for video");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ videoBitsPerSecond: 0,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_equals(rec.videoBitsPerSecond, 0);
+}, "Passing only videoBitsPerSecond:0 results in 0 for video, default for audio");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ bitsPerSecond: 0,
+ audioBitsPerSecond: AUDIO_BITRATE,
+ videoBitsPerSecond: VIDEO_BITRATE,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+ assert_approx_equals(rec.audioBitsPerSecond + rec.videoBitsPerSecond, 0,
+ BITRATE_EPSILON);
+}, "Passing bitsPerSecond:0 overrides audio/video-specific values");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ bitsPerSecond: HIGH_TOTAL_BITRATE,
+ audioBitsPerSecond: 0,
+ videoBitsPerSecond: 0,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+ assert_approx_equals(rec.audioBitsPerSecond + rec.videoBitsPerSecond,
+ HIGH_TOTAL_BITRATE, BITRATE_EPSILON);
+}, "Passing bitsPerSecond overrides audio/video zero values");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ bitsPerSecond: HIGH_TOTAL_BITRATE,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+ assert_approx_equals(rec.audioBitsPerSecond + rec.videoBitsPerSecond,
+ HIGH_TOTAL_BITRATE, BITRATE_EPSILON);
+}, "Passing bitsPerSecond sets audio/video bitrate values");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ audioBitsPerSecond: AUDIO_BITRATE,
+ });
+ assert_equals(rec.audioBitsPerSecond, AUDIO_BITRATE);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+}, "Passing only audioBitsPerSecond results in default for video");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ videoBitsPerSecond: VIDEO_BITRATE,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_equals(rec.videoBitsPerSecond, VIDEO_BITRATE);
+}, "Passing only videoBitsPerSecond results in default for audio");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioStream(t), {
+ videoBitsPerSecond: VIDEO_BITRATE,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_equals(rec.videoBitsPerSecond, VIDEO_BITRATE);
+}, "Passing videoBitsPerSecond for audio-only stream still results in something for video");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getVideoStream(t), {
+ audioBitsPerSecond: AUDIO_BITRATE,
+ });
+ assert_equals(rec.audioBitsPerSecond, AUDIO_BITRATE);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+}, "Passing audioBitsPerSecond for video-only stream still results in something for audio");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioStream(t), {
+ bitsPerSecond: HIGH_TOTAL_BITRATE,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+}, "Passing bitsPerSecond for audio-only stream still results in something for video");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getVideoStream(t), {
+ bitsPerSecond: HIGH_TOTAL_BITRATE,
+ });
+ assert_not_equals(rec.audioBitsPerSecond, 0);
+ assert_not_equals(rec.videoBitsPerSecond, 0);
+}, "Passing bitsPerSecond for video-only stream still results in something for audio");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t));
+ t.add_cleanup(() => rec.stop());
+ const abps = rec.audioBitsPerSecond;
+ const vbps = rec.videoBitsPerSecond;
+ rec.start();
+ assert_equals(rec.audioBitsPerSecond, abps);
+ assert_equals(rec.videoBitsPerSecond, vbps);
+}, "Selected default track bitrates are not changed by start()");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ audioBitsPerSecond: AUDIO_BITRATE,
+ videoBitsPerSecond: VIDEO_BITRATE,
+ });
+ t.add_cleanup(() => rec.stop());
+ const abps = rec.audioBitsPerSecond;
+ const vbps = rec.videoBitsPerSecond;
+ rec.start();
+ assert_equals(rec.audioBitsPerSecond, abps);
+ assert_equals(rec.videoBitsPerSecond, vbps);
+}, "Passed-in track bitrates are not changed by start()");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioVideoStream(t), {
+ bitsPerSecond: HIGH_TOTAL_BITRATE,
+ });
+ t.add_cleanup(() => rec.stop());
+ const abps = rec.audioBitsPerSecond;
+ const vbps = rec.videoBitsPerSecond;
+ rec.start();
+ assert_equals(rec.audioBitsPerSecond, abps);
+ assert_equals(rec.videoBitsPerSecond, vbps);
+}, "Passing bitsPerSecond for audio/video stream does not change track bitrates in start()");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getAudioStream(t), {
+ bitsPerSecond: LOW_TOTAL_BITRATE,
+ });
+ t.add_cleanup(() => rec.stop());
+ const abps = rec.audioBitsPerSecond;
+ const vbps = rec.videoBitsPerSecond;
+ rec.start();
+ assert_approx_equals(rec.audioBitsPerSecond, LOW_TOTAL_BITRATE,
+ BITRATE_EPSILON);
+ assert_equals(rec.videoBitsPerSecond, 0);
+ assert_not_equals(rec.audioBitsPerSecond, abps);
+ assert_not_equals(rec.videoBitsPerSecond, vbps);
+}, "Passing bitsPerSecond for audio stream sets video track bitrate to 0 in start()");
+
+promise_test(async t => {
+ const rec = new MediaRecorder(await getVideoStream(t), {
+ bitsPerSecond: HIGH_TOTAL_BITRATE,
+ });
+ t.add_cleanup(() => rec.stop());
+ const abps = rec.audioBitsPerSecond;
+ const vbps = rec.videoBitsPerSecond;
+ rec.start();
+ assert_equals(rec.audioBitsPerSecond, 0);
+ assert_approx_equals(rec.videoBitsPerSecond, HIGH_TOTAL_BITRATE,
+ BITRATE_EPSILON);
+ assert_not_equals(rec.audioBitsPerSecond, abps);
+ assert_not_equals(rec.videoBitsPerSecond, vbps);
+}, "Passing bitsPerSecond for video stream sets audio track bitrate to 0 in start()");
+</script>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-canvas-media-source.https.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-canvas-media-source.https.html
new file mode 100644
index 0000000000..187015f42e
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-canvas-media-source.https.html
@@ -0,0 +1,128 @@
+<!doctype html>
+<html>
+<meta name="timeout" content="long">
+
+<head>
+ <title>MediaRecorder canvas media source</title>
+ <link rel="help"
+ href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#dom-mediarecorder-mimeType">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/resources/testdriver.js"></script>
+ <script src="/resources/testdriver-vendor.js"></script>
+ <script src="../mediacapture-streams/permission-helper.js"></script>
+</head>
+
+<body>
+ <canvas id="canvas"></canvas>
+ <script>
+
+async_test(test => {
+ const CANVAS_WIDTH = 256;
+ const CANVAS_HEIGHT = 144;
+
+ // Empty video frames from this resolution consistently have ~750 bytes in my
+ // tests, while valid video frames usually contain 7-8KB. A threshold of
+ // 1.5KB consistently fails when video frames are empty but passes when video
+ // frames are non-empty.
+ const THRESHOLD_FOR_EMPTY_FRAMES = 1500;
+
+ const CAMERA_CONSTRAINTS = {
+ video: {
+ width: { ideal: CANVAS_WIDTH },
+ height: { ideal: CANVAS_HEIGHT }
+ }
+ };
+
+ function useUserMedia(constraints) {
+ let activeStream = null;
+
+ function startCamera() {
+ return navigator.mediaDevices.getUserMedia(constraints).then(
+ (stream) => {
+ activeStream = stream;
+ return stream;
+ }
+ );
+ }
+
+ function stopCamera() {
+ activeStream?.getTracks().forEach((track) => track.stop());
+ }
+
+ return { startCamera, stopCamera };
+ }
+
+ function useMediaRecorder(stream, frameSizeCallback) {
+ const mediaRecorder = new MediaRecorder(
+ stream,
+ {}
+ );
+
+ mediaRecorder.ondataavailable = event => {
+ const {size} = event.data;
+ frameSizeCallback(size);
+
+ if (mediaRecorder.state !== "inactive") {
+ mediaRecorder.stop();
+ }
+ };
+
+ mediaRecorder.start(1000);
+ }
+
+ const canvas = document.querySelector("canvas");
+ const ctx = canvas.getContext("2d", {
+ alpha: false,
+ });
+
+ canvas.width = CANVAS_WIDTH;
+ canvas.height = CANVAS_HEIGHT;
+
+ const {startCamera, stopCamera} = useUserMedia(CAMERA_CONSTRAINTS);
+ startCamera().then(async stream => {
+ const videoTrack = stream.getVideoTracks()[0];
+ const { readable: readableStream } = new MediaStreamTrackProcessor({
+ track: videoTrack
+ });
+
+ const composedTrackGenerator = new MediaStreamTrackGenerator({
+ kind: "video"
+ });
+ const sink = composedTrackGenerator.writable;
+
+ ctx.fillStyle = "#333";
+ ctx.fillRect(0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
+
+ const transformer = new TransformStream({
+ async transform(cameraFrame, controller) {
+ if (cameraFrame && cameraFrame.codedWidth > 0) {
+ const leftPos = (CANVAS_WIDTH - cameraFrame.displayWidth) / 2;
+ const topPos = (CANVAS_HEIGHT - cameraFrame.displayHeight) / 2;
+
+ ctx.drawImage(cameraFrame, leftPos, topPos);
+
+ const newFrame = new VideoFrame(canvas, {
+ timestamp: cameraFrame.timestamp
+ });
+ cameraFrame.close();
+ controller.enqueue(newFrame);
+ }
+ }
+ });
+
+ readableStream.pipeThrough(transformer).pipeTo(sink);
+
+ const compositedMediaStream = new MediaStream([composedTrackGenerator]);
+
+ useMediaRecorder(compositedMediaStream, test.step_func_done(size => {
+ assert_greater_than(size, THRESHOLD_FOR_EMPTY_FRAMES);
+ stopCamera();
+ }));
+ });
+}, "MediaRecorder returns frames containing video content");
+
+ </script>
+</body>
+
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-creation.https.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-creation.https.html
new file mode 100644
index 0000000000..d2190c3ee5
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-creation.https.html
@@ -0,0 +1,59 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder Creation</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#mediarecorder">
+<script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src='../mediacapture-streams/permission-helper.js'></script>
+</head>
+<script>
+ // This test verifies that MediaRecorder can be created with different Media
+ // Stream Track combinations: 1 Video Track only, 1 Audio Track only and finally
+ // a Media Stream with both a Video and an Audio Track. Note that recording is
+ // _not_ started in this test, see MediaRecorder-audio-video.html for it.
+
+ function makeAsyncTest(constraints, verifyStream, message) {
+ async_test(function(test) {
+
+ const gotStream = test.step_func(function(stream) {
+ verifyStream(stream);
+
+ var recorder = new MediaRecorder(stream);
+ assert_equals(recorder.state, "inactive");
+ assert_not_equals(recorder.videoBitsPerSecond, 0);
+ assert_not_equals(recorder.audioBitsPerSecond, 0);
+ test.done();
+ });
+
+ const onError = test.unreached_func('Error creating MediaStream.');
+ setMediaPermission().then(() => navigator.mediaDevices.getUserMedia(constraints)).then(gotStream, onError);
+ }, message);
+ }
+
+ function verifyVideoOnlyStream(stream) {
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+ assert_equals(stream.getVideoTracks()[0].readyState, 'live');
+ }
+ function verifyAudioOnlyStream(stream) {
+ assert_equals(stream.getAudioTracks().length, 1);
+ assert_equals(stream.getVideoTracks().length, 0);
+ assert_equals(stream.getAudioTracks()[0].readyState, 'live');
+ }
+ function verifyAudioVideoStream(stream) {
+ assert_equals(stream.getAudioTracks().length, 1);
+ assert_equals(stream.getVideoTracks().length, 1);
+ assert_equals(stream.getVideoTracks()[0].readyState, 'live');
+ assert_equals(stream.getAudioTracks()[0].readyState, 'live');
+ }
+
+ // Note: getUserMedia() must be called with at least one of video or audio set to true.
+ makeAsyncTest({video:true}, verifyVideoOnlyStream, 'Video-only MediaRecorder');
+ makeAsyncTest({audio:true}, verifyAudioOnlyStream, 'Audio-only MediaRecorder');
+ makeAsyncTest({audio:true, video:true}, verifyAudioVideoStream, 'Video+Audio MediaRecorder');
+
+</script>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-destroy-script-execution.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-destroy-script-execution.html
new file mode 100644
index 0000000000..3e9add3c61
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-destroy-script-execution.html
@@ -0,0 +1,79 @@
+<!doctype html>
+<meta charset="utf-8">
+<html>
+<title>MediaRecorder destroy script execution context</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<body>
+<iframe src="support/MediaRecorder-iframe.html" id="subFrame-start" name="subFrameStart"></iframe>
+<iframe src="support/MediaRecorder-iframe.html" id="subFrame-stop" name="subFrameStop"></iframe>
+<iframe src="support/MediaRecorder-iframe.html" id="subFrame-allTrackEnded" name="subFrameAllTrackEnded"></iframe>
+<iframe src="support/MediaRecorder-iframe.html" id="subFrame-audioBitrateMode" name="subFrameAudioBitrateMode"></iframe>
+<script>
+ var iframeForCallingStart = document.getElementById('subFrame-start');
+ var iframeForCallingStop = document.getElementById('subFrame-stop');
+ var iframeForAllTrackEnded = document.getElementById('subFrame-allTrackEnded');
+ var iframeForAudioBitrateMode = document.getElementById('subFrame-audioBitrateMode');
+
+ var testForCallingStart = async_test('MediaRecorder will throw when start() is called and the script execution context is going away');
+ var testForCallingStop = async_test('MediaRecorder will not fire the stop event when stop() is called and the script execution context is going away');
+ var testForAllTrackEnded = async_test('MediaRecorder will not fire the stop event when all tracks are ended and the script execution context is going away');
+ var testForAudioBitrateMode = async_test('MediaRecorder will not crash on accessing audioBitrateMode when the script execution context is going away');
+
+
+ iframeForCallingStart.onload = function(e) {
+ let testWindow = subFrameStart.window;
+ testWindow.prepareForTest(testForCallingStart);
+ let exceptionCtor = testWindow.DOMException;
+ const recorder = subFrameStart.window.recorder;
+ iframeForCallingStart.remove();
+ testForCallingStart.step(function() {
+ assert_throws_dom('NotSupportedError', exceptionCtor, () => recorder.start(),
+ "MediaRecorder.start() should throw");
+ });
+ testForCallingStart.done();
+ };
+
+ iframeForCallingStop.onload = function(e) {
+ subFrameStop.window.prepareForTest(testForCallingStop);
+ const recorder = subFrameStop.window.recorder;
+ recorder.ondataavailable = testForCallingStop.step_func(blobEvent => {
+ iframeForCallingStop.remove();
+ testForCallingStop.step_timeout(testForCallingStop.step_func_done(), 0);
+ });
+ recorder.onstop = testForCallingStop.unreached_func('Unexpected stop event');
+ recorder.start();
+ testForCallingStop.step(function() {
+ assert_equals(recorder.state, 'recording', 'MediaRecorder has been started successfully');
+ });
+ subFrameStop.window.control.addVideoFrame();
+ recorder.stop();
+ };
+
+ iframeForAllTrackEnded.onload = function(e) {
+ subFrameAllTrackEnded.window.prepareForTest(testForAllTrackEnded);
+ const recorder = subFrameAllTrackEnded.window.recorder;
+ recorder.ondataavailable = testForAllTrackEnded.step_func(blobEvent => {
+ iframeForAllTrackEnded.remove();
+ testForAllTrackEnded.step_timeout(testForAllTrackEnded.step_func_done(), 0);
+ });
+ recorder.onstop = testForAllTrackEnded.unreached_func('Unexpected stop event');
+ recorder.start();
+ testForAllTrackEnded.step(function() {
+ assert_equals(recorder.state, 'recording', 'MediaRecorder has been started successfully');
+ });
+ subFrameAllTrackEnded.window.control.addVideoFrame();
+ subFrameAllTrackEnded.window.video.getVideoTracks()[0].stop();
+ };
+
+ iframeForAudioBitrateMode.onload = testForAudioBitrateMode.step_func(function(e) {
+ subFrameAudioBitrateMode.window.prepareForTest(testForAudioBitrateMode);
+ const recorder = subFrameAudioBitrateMode.window.recorder;
+ iframeForAudioBitrateMode.remove();
+ recorder.audioBitrateMode;
+ testForAudioBitrateMode.done();
+ });
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-detached-context.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-detached-context.html
new file mode 100644
index 0000000000..f8a8699ad9
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-detached-context.html
@@ -0,0 +1,26 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder Detached Context</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#mediarecorder">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<script>
+ async_test(t => {
+ const frame = document.body.appendChild(document.createElement('iframe'));
+ const recorderFunc = frame.contentWindow.MediaRecorder;
+ frame.remove();
+
+ try {
+ new recorderFunc(new MediaStream);
+ } catch (err) {
+ assert_equals(err.name, 'NotAllowedError');
+ t.done();
+ return;
+ }
+ assert_unreached('MediaRecorder should have failed');
+ }, 'MediaRecorder creation with detached context');
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-disabled-tracks.https.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-disabled-tracks.https.html
new file mode 100644
index 0000000000..ea64673264
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-disabled-tracks.https.html
@@ -0,0 +1,56 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder Disabled Tracks</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#mediarecorder">
+<script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src='../mediacapture-streams/permission-helper.js'></script>
+</head>
+<script>
+
+ // This test verifies that MediaStream with disabled tracks can be correctly
+ // recorded. See crbug.com/878255 for more context.
+
+ [ ["video-only", {video: true, audio: false}],
+ ["audio-only", {video: false, audio: true}],
+ ["audio-video", {video: true, audio: true}]]
+ .forEach( function(args) {
+ async_test(function(test) {
+ let recorder;
+ const recorderOnDataAvailable = test.step_func(function(event) {
+ if (recorder.state != "recording")
+ return;
+
+ recorder.onstop = recorderOnStopExpected;
+ recorder.stop();
+ });
+
+ const recorderOnStopExpected = test.step_func_done();
+ const recorderOnStopUnexpected = test.unreached_func('Recording stopped.');
+ const recorderOnError = test.unreached_func('Recording error.');
+
+ const gotStream = test.step_func(function(stream) {
+ for (const track of stream.getTracks())
+ track.enabled = false;
+
+ recorder = new MediaRecorder(stream);
+
+ assert_equals(recorder.state, "inactive");
+ recorder.ondataavailable = recorderOnDataAvailable;
+ recorder.onstop = recorderOnStopUnexpected;
+ recorder.onerror = recorderOnError;
+ recorder.start();
+
+ assert_equals(recorder.state, "recording");
+ recorder.requestData();
+ });
+
+ const onError = test.unreached_func('Error creating MediaStream.');
+ setMediaPermission().then(() => navigator.mediaDevices.getUserMedia(args[1])).then(gotStream, onError);
+ }, args[0]);
+ });
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-error.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-error.html
new file mode 100644
index 0000000000..54e83ecac7
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-error.html
@@ -0,0 +1,62 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder Error</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#mediarecorder">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="utils/sources.js"></script>
+</head>
+<body>
+<script>
+ async_test(t => {
+ const {stream: video, control} = createVideoStream(t);
+ const {stream: audio} = createAudioStream(t);
+ const recorder = new MediaRecorder(video);
+ recorder.onerror = t.step_func(mediaRecorderErrorEvent => {
+ assert_true(mediaRecorderErrorEvent instanceof MediaRecorderErrorEvent, 'the type of event should be MediaRecorderErrorEvent');
+ assert_equals(mediaRecorderErrorEvent.error.name, 'InvalidModificationError', 'the type of error should be InvalidModificationError when track has been added or removed');
+ assert_true(mediaRecorderErrorEvent.isTrusted, 'isTrusted should be true when the event is created by C++');
+ assert_equals(recorder.state, "inactive", "MediaRecorder has been stopped after adding a track to stream");
+ t.done();
+ });
+ recorder.start();
+ assert_equals(recorder.state, "recording", "MediaRecorder has been started successfully");
+ video.addTrack(audio.getAudioTracks()[0]);
+ control.addVideoFrame();
+ t.step_timeout(() => {
+ assert_unreached("error event is not fired after 2 seconds");
+ }, 2000);
+ }, "MediaRecorder will stop recording when any of track is added and error event will be fired");
+
+ async_test(t => {
+ const {stream: video, control} = createVideoStream(t);
+ const recorder = new MediaRecorder(video);
+ recorder.onerror = t.step_func(mediaRecorderErrorEvent => {
+ assert_true(mediaRecorderErrorEvent instanceof MediaRecorderErrorEvent, 'the type of event should be MediaRecorderErrorEvent');
+ assert_equals(mediaRecorderErrorEvent.error.name, 'InvalidModificationError', 'the type of error should be InvalidModificationError when track has been added or removed');
+ assert_true(mediaRecorderErrorEvent.isTrusted, 'isTrusted should be true when the event is created by C++');
+ assert_equals(recorder.state, "inactive", "MediaRecorder has been stopped after removing a track from stream");
+ t.done();
+ });
+ recorder.start();
+ assert_equals(recorder.state, "recording", "MediaRecorder has been started successfully");
+ video.removeTrack(video.getVideoTracks()[0]);
+ control.addVideoFrame();
+ t.step_timeout(() => {
+ assert_unreached("error event is not fired after 2 seconds");
+ }, 2000);
+ }, "MediaRecorder will stop recording when any of track is removed and error event will be fired");
+
+ test(t => {
+ const {stream: video} = createVideoStream(t);
+ const recorder = new MediaRecorder(video);
+ recorder.start();
+ assert_equals(recorder.state, "recording", "MediaRecorder has been started successfully");
+ assert_throws_dom("InvalidStateError", function() {
+ recorder.start();
+ });
+ }, "MediaRecorder cannot start recording when MediaRecorder' state is not inactive and an InvalidStateError should be thrown");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-events-and-exceptions.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-events-and-exceptions.html
new file mode 100644
index 0000000000..0a377991ba
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-events-and-exceptions.html
@@ -0,0 +1,108 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder events and exceptions</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#mediarecorder">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="utils/sources.js"></script>
+</head>
+<body>
+<script>
+
+ // This test exercises the MediaRecorder API event sequence:
+ // onStart -> onPause -> onResume -> onDataAvailable -> onStop
+ // verifying the |state| and a few exceptions that are supposed to be thrown
+ // when doing the wrong thing.
+
+ async_test(test => {
+
+ recorderOnUnexpectedEvent = test.step_func(() => {
+ assert_unreached('Unexpected event.');
+ });
+
+ recorderOnDataAvailable = test.step_func(event => {
+ assert_equals(recorder.state, "inactive");
+ assert_not_equals(event.data.size, 0, 'We should get a Blob with data');
+ });
+
+ recorderOnStop = test.step_func(function() {
+ assert_equals(recorder.state, "inactive");
+ recorder.onstop = recorderOnUnexpectedEvent;
+ recorder.stop();
+ assert_equals(recorder.state, "inactive", "stop() is idempotent");
+ assert_throws_dom("InvalidStateError", function() { recorder.pause() },
+ "recorder cannot be pause()ed in |inactive| state");
+ assert_throws_dom("InvalidStateError", function() { recorder.resume() },
+ "recorder cannot be resume()d in |inactive| state");
+ assert_throws_dom("InvalidStateError", function() { recorder.requestData() },
+ "cannot requestData() if recorder is in |inactive| state");
+ test.done();
+ });
+
+ recorderOnResume = test.step_func(function() {
+ assert_equals(recorder.state, "recording");
+ recorder.onresume = recorderOnUnexpectedEvent;
+ recorder.onstop = recorderOnStop;
+ recorder.stop();
+ });
+
+ recorderOnPause = test.step_func(function() {
+ assert_equals(recorder.state, "paused");
+ recorder.onpause = recorderOnUnexpectedEvent;
+ recorder.onresume = recorderOnResume;
+ recorder.resume();
+ });
+
+ recorderOnStart = test.step_func(function() {
+ assert_equals(recorder.state, "recording");
+ recorder.onstart = recorderOnUnexpectedEvent;
+ recorder.onpause = recorderOnPause;
+ recorder.pause();
+ });
+
+ const {stream, control} = createVideoStream(test);
+ assert_equals(stream.getAudioTracks().length, 0);
+ assert_equals(stream.getVideoTracks().length, 1);
+ assert_equals(stream.getVideoTracks()[0].readyState, 'live');
+
+ assert_throws_dom("NotSupportedError",
+ function() {
+ new MediaRecorder(
+ new MediaStream(), {mimeType : "video/invalid"});
+ },
+ "recorder should throw() with unsupported mimeType");
+ const recorder = new MediaRecorder(new MediaStream());
+ assert_equals(recorder.state, "inactive");
+
+ recorder.stop();
+ assert_equals(recorder.state, "inactive", "stop() is idempotent");
+ assert_throws_dom("InvalidStateError", function(){recorder.pause()},
+ "recorder cannot be pause()ed in |inactive| state");
+ assert_throws_dom("InvalidStateError", function(){recorder.resume()},
+ "recorder cannot be resume()d in |inactive| state");
+ assert_throws_dom("InvalidStateError", function(){recorder.requestData()},
+ "cannot requestData() if recorder is in |inactive| state");
+
+ assert_throws_dom("NotSupportedError",
+ function() {
+ recorder.start();
+ },
+ "recorder should throw() when starting with inactive stream");
+
+ recorder.stream.addTrack(stream.getTracks()[0]);
+
+ control.addVideoFrame();
+
+ recorder.onstop = recorderOnUnexpectedEvent;
+ recorder.onpause = recorderOnUnexpectedEvent;
+ recorder.onresume = recorderOnUnexpectedEvent;
+ recorder.onerror = recorderOnUnexpectedEvent;
+ recorder.ondataavailable = recorderOnDataAvailable;
+ recorder.onstart = recorderOnStart;
+
+ recorder.start();
+ assert_equals(recorder.state, "recording");
+ });
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-mimetype.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-mimetype.html
new file mode 100644
index 0000000000..07721abfd4
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-mimetype.html
@@ -0,0 +1,205 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder mimeType</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#dom-mediarecorder-mimeType">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="utils/sources.js"></script>
+</head>
+<body>
+<script>
+test(t => {
+ const recorder = new MediaRecorder(createAudioStream(t).stream);
+ assert_equals(recorder.mimeType, "",
+ "MediaRecorder has no default mimeType");
+}, "MediaRecorder sets no default mimeType in the constructor for audio");
+
+test(t => {
+ const recorder = new MediaRecorder(createVideoStream(t).stream);
+ assert_equals(recorder.mimeType, "",
+ "MediaRecorder has no default mimeType");
+}, "MediaRecorder sets no default mimeType in the constructor for video");
+
+test(t => {
+ const recorder = new MediaRecorder(createAudioVideoStream(t).stream);
+ assert_equals(recorder.mimeType, "",
+ "MediaRecorder has no default mimeType");
+}, "MediaRecorder sets no default mimeType in the constructor for audio/video");
+
+test(t => {
+ assert_throws_dom("NotSupportedError",
+ () => new MediaRecorder(new MediaStream(), {mimeType: "audio/banana"}));
+}, "MediaRecorder invalid audio mimeType throws");
+
+test(t => {
+ assert_false(MediaRecorder.isTypeSupported("audio/banana"));
+}, "MediaRecorder invalid audio mimeType is unsupported");
+
+test(t => {
+ assert_throws_dom("NotSupportedError",
+ () => new MediaRecorder(new MediaStream(), {mimeType: "video/pineapple"}));
+}, "MediaRecorder invalid video mimeType throws");
+
+test(t => {
+ assert_false(MediaRecorder.isTypeSupported("video/pineapple"));
+}, "MediaRecorder invalid video mimeType is unsupported");
+
+// New MIME types could be added to this list as needed.
+for (const mimeType of [
+ 'audio/mp4',
+ 'video/mp4',
+ 'audio/ogg',
+ 'audio/ogg; codecs="vorbis"',
+ 'audio/ogg; codecs="opus"',
+ 'audio/webm',
+ 'audio/webm; codecs="vorbis"',
+ 'audio/webm; codecs="opus"',
+ 'video/webm',
+ 'video/webm; codecs="vp8"',
+ 'video/webm; codecs="vp8, vorbis"',
+ 'video/webm; codecs="vp8, opus"',
+ 'video/webm; codecs="vp9"',
+ 'video/webm; codecs="vp9, vorbis"',
+ 'video/webm; codecs="vp9, opus"',
+ 'video/webm; codecs="av1"',
+ 'video/webm; codecs="av1, opus"',
+]) {
+ if (MediaRecorder.isTypeSupported(mimeType)) {
+ test(t => {
+ const recorder = new MediaRecorder(new MediaStream(), {mimeType});
+ assert_equals(recorder.mimeType, mimeType, "Supported mimeType is set");
+ }, `Supported mimeType ${mimeType} is set immediately after constructing`);
+ } else {
+ test(t => {
+ assert_throws_dom("NotSupportedError",
+ () => new MediaRecorder(new MediaStream(), {mimeType}));
+ }, `Unsupported mimeType ${mimeType} throws`);
+ }
+}
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingAudioStream(t).stream);
+ recorder.start();
+ await new Promise(r => recorder.onstart = r);
+ assert_not_equals(recorder.mimeType, "");
+}, "MediaRecorder sets a nonempty mimeType on 'onstart' for audio");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingVideoStream(t).stream);
+ recorder.start();
+ await new Promise(r => recorder.onstart = r);
+ assert_not_equals(recorder.mimeType, "");
+}, "MediaRecorder sets a nonempty mimeType on 'onstart' for video");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingAudioVideoStream(t).stream);
+ recorder.start();
+ await new Promise(r => recorder.onstart = r);
+ assert_not_equals(recorder.mimeType, "");
+}, "MediaRecorder sets a nonempty mimeType on 'onstart' for audio/video");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingAudioStream(t).stream);
+ recorder.start();
+ assert_equals(recorder.mimeType, "");
+}, "MediaRecorder mimeType is not set before 'onstart' for audio");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingVideoStream(t).stream);
+ recorder.start();
+ assert_equals(recorder.mimeType, "");
+}, "MediaRecorder mimeType is not set before 'onstart' for video");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingAudioVideoStream(t).stream);
+ recorder.start();
+ assert_equals(recorder.mimeType, "");
+}, "MediaRecorder mimeType is not set before 'onstart' for audio/video");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingAudioStream(t).stream);
+ const onstartPromise = new Promise(resolve => {
+ recorder.onstart = () => {
+      recorder.onstart = t.step_func(() =>
+        assert_not_reached("MediaRecorder doesn't fire 'onstart' twice"));
+ resolve();
+ }
+ });
+ recorder.start();
+ await onstartPromise;
+ await new Promise(r => t.step_timeout(r, 1000));
+}, "MediaRecorder doesn't fire 'onstart' multiple times for audio");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingVideoStream(t).stream);
+ const onstartPromise = new Promise(resolve => {
+ recorder.onstart = () => {
+      recorder.onstart = t.step_func(() =>
+        assert_not_reached("MediaRecorder doesn't fire 'onstart' twice"));
+ resolve();
+ }
+ });
+ recorder.start();
+ await onstartPromise;
+ await new Promise(r => t.step_timeout(r, 1000));
+}, "MediaRecorder doesn't fire 'onstart' multiple times for video");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingAudioVideoStream(t).stream);
+ const onstartPromise = new Promise(resolve => {
+ recorder.onstart = () => {
+      recorder.onstart = t.step_func(() =>
+        assert_not_reached("MediaRecorder doesn't fire 'onstart' twice"));
+ resolve();
+ }
+ });
+ recorder.start();
+ await onstartPromise;
+ await new Promise(r => t.step_timeout(r, 1000));
+}, "MediaRecorder doesn't fire 'onstart' multiple times for audio/video");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingAudioStream(t).stream);
+ recorder.start();
+ await new Promise(r => recorder.onstart = r);
+ assert_regexp_match(recorder.mimeType, /^audio\//,
+ "mimeType has an expected media type");
+ assert_regexp_match(recorder.mimeType, /^[a-z]+\/[a-z]+/,
+ "mimeType has a container subtype");
+ assert_regexp_match(
+ recorder.mimeType, /^[a-z]+\/[a-z]+;[ ]*codecs=[^,]+$/,
+ "mimeType has one codec a");
+}, "MediaRecorder formats mimeType well after 'start' for audio");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingVideoStream(t).stream);
+ recorder.start();
+ await new Promise(r => recorder.onstart = r);
+ assert_regexp_match(recorder.mimeType, /^video\//,
+ "mimeType has an expected media type");
+ assert_regexp_match(recorder.mimeType, /^[a-z]+\/[a-z]+/,
+ "mimeType has a container subtype");
+ assert_regexp_match(
+ recorder.mimeType, /^[a-z]+\/[a-z]+;[ ]*codecs=[^,]+$/,
+ "mimeType has one codec a");
+}, "MediaRecorder formats mimeType well after 'start' for video");
+
+promise_test(async t => {
+ const recorder = new MediaRecorder(createFlowingAudioVideoStream(t).stream);
+ recorder.start();
+ await new Promise(r => recorder.onstart = r);
+ assert_regexp_match(recorder.mimeType, /^video\//,
+ "mimeType has an expected media type");
+ assert_regexp_match(recorder.mimeType, /^[a-z]+\/[a-z]+/,
+ "mimeType has a container subtype");
+ assert_regexp_match(
+ recorder.mimeType, /^[a-z]+\/[a-z]+;[ ]*codecs=[^,]+,[^,]+$/,
+ "mimeType has two codecs");
+}, "MediaRecorder formats mimeType well after 'start' for audio/video");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-pause-resume.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-pause-resume.html
new file mode 100644
index 0000000000..a1495dcb0c
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-pause-resume.html
@@ -0,0 +1,89 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder Pause and Resume</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#mediarecorder">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="utils/sources.js"></script>
+</head>
+<body>
+<script>
+ function recordEvents(target, events) {
+ let arr = [];
+ for (let ev of events) {
+ target.addEventListener(ev, _ => arr.push(ev));
+ }
+ return arr;
+ }
+
+ promise_test(async t => {
+ const {stream: video, control} = createVideoStream(t);
+ control.addVideoFrame();
+ const recorder = new MediaRecorder(video);
+ const events = recordEvents(recorder,
+ ["start", "stop", "dataavailable", "pause", "resume", "error"]);
+
+ recorder.start();
+ assert_equals(recorder.state, "recording", "MediaRecorder has been started successfully");
+ await new Promise(r => recorder.onstart = r);
+
+ recorder.pause();
+ assert_equals(recorder.state, "paused", "MediaRecorder should be paused immediately following pause()");
+
+ // A second call to pause should be idempotent
+ recorder.pause();
+ assert_equals(recorder.state, "paused", "MediaRecorder should be paused immediately following pause()");
+
+ let event = await new Promise(r => recorder.onpause = r);
+ assert_equals(event.type, "pause", "the event type should be pause");
+ assert_true(event.isTrusted, "isTrusted should be true when the event is created by C++");
+
+ recorder.resume();
+ assert_equals(recorder.state, "recording", "MediaRecorder state should be recording immediately following resume() call");
+
+ // A second call to resume should be idempotent
+ recorder.resume();
+ assert_equals(recorder.state, "recording", "MediaRecorder state should be recording immediately following resume() call");
+
+ event = await new Promise(r => recorder.onresume = r);
+ assert_equals(event.type, "resume", "the event type should be resume");
+ assert_true(event.isTrusted, "isTrusted should be true when the event is created by C++");
+
+ recorder.stop();
+ await new Promise(r => recorder.onstop = r);
+
+ assert_array_equals(events, ["start", "pause", "resume", "dataavailable", "stop"],
+ "Should have gotten expected events");
+ }, "MediaRecorder handles pause() and resume() calls appropriately in state and events");
+
+  promise_test(async t => {
+    const {stream: video} = createVideoStream(t);
+    const recorder = new MediaRecorder(video);
+ let events = recordEvents(recorder,
+ ["start", "stop", "dataavailable", "pause", "resume", "error"]);
+
+ recorder.start();
+ assert_equals(recorder.state, "recording", "MediaRecorder has been started successfully");
+ await new Promise(r => recorder.onstart = r);
+
+ recorder.pause();
+ assert_equals(recorder.state, "paused", "MediaRecorder should be paused immediately following pause()");
+ let event = await new Promise(r => recorder.onpause = r);
+ assert_equals(event.type, "pause", "the event type should be pause");
+ assert_true(event.isTrusted, "isTrusted should be true when the event is created by C++");
+
+ recorder.stop();
+ assert_equals(recorder.state, "inactive", "MediaRecorder should be inactive after being stopped");
+ await new Promise(r => recorder.onstop = r);
+
+ recorder.start();
+ assert_equals(recorder.state, "recording", "MediaRecorder has been started successfully");
+ await new Promise(r => recorder.onstart = r);
+
+ assert_array_equals(events, ["start", "pause", "dataavailable", "stop", "start"],
+ "Should have gotten expected events");
+ }, "MediaRecorder handles stop() in paused state appropriately");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-peerconnection-no-sink.https.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-peerconnection-no-sink.https.html
new file mode 100644
index 0000000000..106ad06059
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-peerconnection-no-sink.https.html
@@ -0,0 +1,47 @@
+<!doctype html>
+<html>
+<meta name="timeout" content="long">
+
+<head>
+ <title>MediaRecorder peer connection</title>
+ <link rel="help"
+ href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#dom-mediarecorder-mimeType">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/resources/testdriver.js"></script>
+ <script src="/resources/testdriver-vendor.js"></script>
+ <script src="../mediacapture-streams/permission-helper.js"></script>
+ <script src="utils/peerconnection.js"></script>
+</head>
+
+<body>
+ <script>
+
+promise_setup(async () => {
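+  // promise_setup has no test object, so this shim forwards add_cleanup to
+  // add_completion_callback; cleanup registered by startConnection then runs
+  // once all tests have completed.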
+ const t = {add_cleanup: add_completion_callback};
+ const [, pc, stream] = await startConnection(t, true, true);
+ const [audio] = stream.getAudioTracks();
+ const [video] = stream.getVideoTracks();
+
+ for (const kinds of [{ audio }, { video }, { audio, video }]) {
+ const tag = `${JSON.stringify(kinds)}`;
+ const stream = new MediaStream([kinds.audio, kinds.video].filter(n => n));
+
+ promise_test(async t => {
+ const recorder = new MediaRecorder(stream);
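+      // A 200 ms timeslice makes the recorder emit dataavailable events
+      // periodically while recording.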
+ recorder.start(200);
+ let combinedSize = 0;
+ // Wait for a small amount of data to appear. Kept small for mobile tests
+ while (combinedSize < 2000) {
+ const {data} = await new Promise(r => recorder.ondataavailable = r);
+ combinedSize += data.size;
+ }
+ recorder.stop();
+ }, `MediaRecorder records from PeerConnection without sinks, ${tag}`);
+ }
+});
+
+ </script>
+</body>
+
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-peerconnection.https.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-peerconnection.https.html
new file mode 100644
index 0000000000..86c9d4f4a2
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-peerconnection.https.html
@@ -0,0 +1,86 @@
+<!doctype html>
+<html>
+<meta name="timeout" content="long">
+
+<head>
+ <title>MediaRecorder peer connection</title>
+ <link rel="help"
+ href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#dom-mediarecorder-mimeType">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/resources/testdriver.js"></script>
+ <script src="/resources/testdriver-vendor.js"></script>
+ <script src="../mediacapture-streams/permission-helper.js"></script>
+ <script src="utils/peerconnection.js"></script>
+</head>
+
+<body>
+ <video id="remote" autoplay width="240" />
+ <script>
+
+promise_setup(async () => {
+ const t = {add_cleanup: add_completion_callback};
+ const [, pc, stream] = await startConnection(t, true, true);
+ const [audio] = stream.getAudioTracks();
+ const [video] = stream.getVideoTracks();
+
+ // Needed for the tests to get exercised in Chrome (bug)
+ document.getElementById('remote').srcObject = stream;
+
+ for (const {kinds, mimeType} of [
+ { kinds: { video }, mimeType: "" },
+ { kinds: { audio }, mimeType: "" },
+ { kinds: { video, audio }, mimeType: "" },
+ { kinds: { audio }, mimeType: "audio/webm;codecs=opus" },
+ { kinds: { video }, mimeType: "video/webm;codecs=vp8" },
+ { kinds: { video, audio }, mimeType: "video/webm;codecs=vp8,opus" },
+ { kinds: { video }, mimeType: "video/webm;codecs=vp9" },
+ { kinds: { video, audio }, mimeType: "video/webm;codecs=vp9,opus" }
+ ]) {
+ const tag = `${JSON.stringify(kinds)} mimeType "${mimeType}"`;
+ const stream = new MediaStream([kinds.audio, kinds.video].filter(n => n));
+
+    // The spec doesn't mandate codec support, so if the mimeType is
+    // unsupported, test that the constructor throws instead.
+ if (mimeType && !MediaRecorder.isTypeSupported(mimeType)) {
+ promise_test(async t => {
+ assert_throws_dom('NotSupportedError',
+ () => new MediaRecorder(stream, { mimeType }));
+ }, `MediaRecorder constructor throws on no support, ${tag}`);
+ continue;
+ }
+
+ promise_test(async t => {
+ const recorder = new MediaRecorder(stream, { mimeType });
+ recorder.start(200);
+ await new Promise(r => recorder.onstart = r);
+ let combinedSize = 0;
+ // Wait for a small amount of data to appear. Kept small for mobile tests
+ while (combinedSize < 2000) {
+ const {data} = await new Promise(r => recorder.ondataavailable = r);
+ combinedSize += data.size;
+ }
+ recorder.stop();
+ }, `PeerConnection MediaRecorder receives data after onstart, ${tag}`);
+
+ promise_test(async t => {
+ const clone = stream.clone();
+ const recorder = new MediaRecorder(clone, { mimeType });
+ recorder.start();
+ await new Promise(r => recorder.onstart = r);
+ await waitForReceivedFramesOrPackets(t, pc, kinds.audio, kinds.video, 10);
+ for (const track of clone.getTracks()) {
+ track.stop();
+ }
+ // As the tracks ended, expect data from the recorder.
+ await Promise.all([
+ new Promise(r => recorder.onstop = r),
+ new Promise(r => recorder.ondataavailable = r)
+ ]);
+ }, `PeerConnection MediaRecorder gets ondata on stopping tracks, ${tag}`);
+ }
+});
+
+ </script>
+</body>
+
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-start.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-start.html
new file mode 100644
index 0000000000..ef2fe69719
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-start.html
@@ -0,0 +1,25 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder Start</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#dom-mediarecorder-start">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<canvas id="canvas" width="200" height="200">
+</canvas>
+<script>
+ function createVideoStream() {
+ canvas.getContext('2d');
+ return canvas.captureStream();
+ }
+
+ test(t => {
+ const mimeType = [ 'audio/aac', 'audio/ogg', 'audio/webm' ].find(MediaRecorder.isTypeSupported);
+ const mediaRecorder = new MediaRecorder(createVideoStream(), {mimeType});
+ assert_throws_dom("NotSupportedError", () => mediaRecorder.start());
+ }, "MediaRecorder cannot record the stream using the current configuration");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/MediaRecorder-stop.html b/testing/web-platform/tests/mediacapture-record/MediaRecorder-stop.html
new file mode 100644
index 0000000000..73eb2999ad
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/MediaRecorder-stop.html
@@ -0,0 +1,151 @@
+<!doctype html>
+<html>
+<head>
+ <title>MediaRecorder Stop</title>
+ <link rel="help" href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#mediarecorder">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="utils/sources.js"></script>
+</head>
+<body>
+<script>
+ function recordEvents(target, events) {
+ let arr = [];
+ for (let ev of events) {
+ target.addEventListener(ev, _ => arr.push(ev));
+ }
+ return arr;
+ }
+
+  // Checks that the elements of |actual| form a subsequence of the
+  // |expected| sequence.
+  function assertSequenceIn(actual, expected) {
+    let i = 0;
+    for (const event of actual) {
+      const j = expected.slice(i).indexOf(event);
+      assert_greater_than_equal(
+          j, 0, "Sequence element " + event + " is not included in " +
+          expected.slice(i));
+      // Continue the search after the matched element so ordering is enforced.
+      i += j + 1;
+    }
+    return true;
+  }
+
+ promise_test(async t => {
+ const {stream: video} = createVideoStream(t);
+ const recorder = new MediaRecorder(video);
+ const events = recordEvents(recorder,
+ ["start", "stop", "dataavailable", "pause", "resume", "error"]);
+ assert_equals(video.getVideoTracks().length, 1, "video mediastream starts with one track");
+ recorder.start();
+ assert_equals(recorder.state, "recording", "MediaRecorder has been started successfully");
+ video.getVideoTracks()[0].stop();
+ assert_equals(recorder.state, "recording", "MediaRecorder state should be recording immediately following last track ending");
+ const event = await new Promise(r => recorder.onstop = r);
+
+ assert_equals(event.type, "stop", "the event type should be stop");
+ assert_true(event.isTrusted, "isTrusted should be true when the event is created by C++");
+ assert_equals(recorder.state, "inactive", "MediaRecorder is inactive after stop event");
+
+    // As the test is written, it's not guaranteed that onstart or
+    // ondataavailable are invoked, but it's fine if they are.
+    // The "stop" event is guaranteed to be in |events| when we get here.
+ assertSequenceIn(events, ["start", "dataavailable", "stop"]);
+ }, "MediaRecorder will stop recording and fire a stop event when all tracks are ended");
+
+ promise_test(async t => {
+ const {stream: video} = createVideoStream(t);
+ const recorder = new MediaRecorder(video);
+ const events = recordEvents(recorder,
+ ["start", "stop", "dataavailable", "pause", "resume", "error"]);
+ recorder.start();
+ assert_equals(recorder.state, "recording", "MediaRecorder has been started successfully");
+ recorder.stop();
+ assert_equals(recorder.state, "inactive", "MediaRecorder state should be inactive immediately following stop() call");
+
+ const event = await new Promise (r => recorder.onstop = r);
+ assert_equals(event.type, "stop", "the event type should be stop");
+ assert_true(event.isTrusted, "isTrusted should be true when the event is created by C++");
+ assert_equals(recorder.state, "inactive", "MediaRecorder is inactive after stop event");
+
+    // As the test is written, it's not guaranteed that onstart or
+    // ondataavailable are invoked, but it's fine if they are.
+    // The "stop" event is guaranteed to be in |events| when we get here.
+ assertSequenceIn(events, ["start", "dataavailable", "stop"]);
+ }, "MediaRecorder will stop recording and fire a stop event when stop() is called");
+
+ promise_test(async t => {
+ const recorder = new MediaRecorder(createVideoStream(t).stream);
+ recorder.stop();
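+    // Race a zero-delay timeout against onstop to verify that stopping an
+    // inactive recorder does not fire a stop event.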
+ await Promise.race([
+ new Promise((_, reject) => recorder.onstop =
+ _ => reject(new Error("onstop should never have been called"))),
+ new Promise(r => t.step_timeout(r, 0))]);
+ }, "MediaRecorder will not fire an exception when stopped after creation");
+
+ promise_test(async t => {
+ const recorder = new MediaRecorder(createVideoStream(t).stream);
+ recorder.start();
+ recorder.stop();
+ const event = await new Promise(r => recorder.onstop = r);
+ recorder.stop();
+ await Promise.race([
+ new Promise((_, reject) => recorder.onstop =
+ _ => reject(new Error("onstop should never have been called"))),
+ new Promise(r => t.step_timeout(r, 0))]);
+ }, "MediaRecorder will not fire an exception when stopped after having just been stopped");
+
+ promise_test(async t => {
+ const {stream} = createVideoStream(t);
+ const recorder = new MediaRecorder(stream);
+ recorder.start();
+ stream.getVideoTracks()[0].stop();
+ const event = await new Promise(r => recorder.onstop = r);
+ recorder.stop();
+ await Promise.race([
+ new Promise((_, reject) => recorder.onstop =
+ _ => reject(new Error("onstop should never have been called"))),
+ new Promise(r => t.step_timeout(r, 0))]);
+ }, "MediaRecorder will not fire an exception when stopped after having just been spontaneously stopped");
+
+ promise_test(async t => {
+ const {stream} = createAudioVideoStream(t);
+ const recorder = new MediaRecorder(stream);
+ const events = [];
+ const startPromise = new Promise(resolve => recorder.onstart = resolve);
+ const stopPromise = new Promise(resolve => recorder.onstop = resolve);
+
+ startPromise.then(() => events.push("start"));
+ stopPromise.then(() => events.push("stop"));
+
+ recorder.start();
+ recorder.stop();
+
+ await stopPromise;
+ assert_array_equals(events, ["start", "stop"]);
+ }, "MediaRecorder will fire start event even if stopped synchronously");
+
+ promise_test(async t => {
+ const {stream} = createAudioVideoStream(t);
+ const recorder = new MediaRecorder(stream);
+ const events = [];
+ const startPromise = new Promise(resolve => recorder.onstart = resolve);
+ const stopPromise = new Promise(resolve => recorder.onstop = resolve);
+ const errorPromise = new Promise(resolve => recorder.onerror = resolve);
+ const dataPromise = new Promise(resolve => recorder.ondataavailable = resolve);
+
+ startPromise.then(() => events.push("start"));
+ stopPromise.then(() => events.push("stop"));
+ errorPromise.then(() => events.push("error"));
+ dataPromise.then(() => events.push("data"));
+
+ recorder.start();
+ stream.removeTrack(stream.getAudioTracks()[0]);
+
+ await stopPromise;
+ assert_array_equals(events, ["start", "error", "data", "stop"]);
+ }, "MediaRecorder will fire start event even if a track is removed synchronously");
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/idlharness.window.js b/testing/web-platform/tests/mediacapture-record/idlharness.window.js
new file mode 100644
index 0000000000..99e884530c
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/idlharness.window.js
@@ -0,0 +1,40 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+
+'use strict';
+
+// https://w3c.github.io/mediacapture-record/
+
+idl_test(
+ ['mediastream-recording'],
+ ['mediacapture-streams', 'FileAPI', 'html', 'dom', 'webidl'],
+ idl_array => {
+ // Ignored errors will be surfaced in idlharness.js's test_object below.
+ let recorder, blob, error;
+ try {
+ const canvas = document.createElement('canvas');
+ document.body.appendChild(canvas);
+ const context = canvas.getContext("2d");
+ context.fillStyle = "red";
+ context.fillRect(0, 0, 10, 10);
+ const stream = canvas.captureStream();
+ recorder = new MediaRecorder(stream);
+ } catch(e) {}
+ idl_array.add_objects({ MediaRecorder: [recorder] });
+
+ try {
+ blob = new BlobEvent("type", {
+ data: new Blob(),
+ timecode: performance.now(),
+ });
+ } catch(e) {}
+ idl_array.add_objects({ BlobEvent: [blob] });
+
+ try {
+ error = new MediaRecorderErrorEvent("type", {
+ error: new DOMException,
+ });
+ } catch(e) {}
+ idl_array.add_objects({ MediaRecorderErrorEvent: [error] });
+ }
+);
diff --git a/testing/web-platform/tests/mediacapture-record/passthrough/MediaRecorder-passthrough.https.html b/testing/web-platform/tests/mediacapture-record/passthrough/MediaRecorder-passthrough.https.html
new file mode 100644
index 0000000000..ceeae2eade
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/passthrough/MediaRecorder-passthrough.https.html
@@ -0,0 +1,74 @@
+<!doctype html>
+<html>
+
+<head>
+ <title>MediaRecorder peer connection</title>
+ <link rel="help"
+ href="https://w3c.github.io/mediacapture-record/MediaRecorder.html#dom-mediarecorder-mimeType">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/resources/testdriver.js"></script>
+ <script src="/resources/testdriver-vendor.js"></script>
+ <script src="../../mediacapture-streams/permission-helper.js"></script>
+ <script src="../utils/peerconnection.js"></script>
+</head>
+
+<body>
+ <video id="remote" autoplay width="240" />
+ <script>
+
+[{kind: "video", audio: false, codecPreference: "VP8", codecRegex: /.*vp8.*/},
+ {kind: "audio/video", audio: true, codecPreference: "VP8", codecRegex: /.*vp8.*/},
+ {kind: "video", audio: false, codecPreference: "VP9", codecRegex: /.*vp9.*/},
+ {kind: "audio/video", audio: true, codecPreference: "VP9", codecRegex: /.*vp9.*/}]
+ .forEach(args => {
+ promise_test(async t => {
+ const [localPc, remotePc, stream] = await startConnection(
+ t, args.audio, /*video=*/true, args.codecPreference);
+
+ // Needed for the tests to get exercised in Chrome (bug)
+ document.getElementById('remote').srcObject = stream;
+
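+    // With no mimeType given, the recorder is free to pass the received
+    // encoding through without re-encoding, so mimeType should end up
+    // matching the negotiated codec.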
+ const recorder = new MediaRecorder(stream); // Passthrough.
+ const onstartPromise = new Promise(resolve => {
+ recorder.onstart = t.step_func(() => {
+ assert_regexp_match(
+ recorder.mimeType, args.codecRegex,
+ "mimeType is matching " + args.codecPreference +
+ " in case of passthrough.");
+ resolve();
+ });
+ });
+ recorder.start();
+    await onstartPromise;
+ }, "PeerConnection passthrough MediaRecorder receives " +
+ args.codecPreference + " after onstart with a " + args.kind +
+ " stream.");
+ });
+
+promise_test(async t => {
+ const [localPc, remotePc, stream, transceivers] = await startConnection(
+ t, /*audio=*/false, /*video=*/true, /*videoCodecPreference=*/"VP8");
+
+ // Needed for the tests to get exercised in Chrome (bug)
+ document.getElementById('remote').srcObject = stream;
+
+ const recorder = new MediaRecorder(stream); // Possibly passthrough.
+ recorder.start();
+ await waitForReceivedFramesOrPackets(t, remotePc, false, true, 10);
+
+ // Switch codec to VP9; we expect onerror to not be invoked.
+ recorder.onerror = t.step_func(() => assert_unreached(
+ "MediaRecorder should be prepared to handle codec switches"));
+ setTransceiverCodecPreference(transceivers.video, "VP9");
+ await Promise.all([
+ exchangeOfferAnswer(localPc, remotePc),
+ waitForReceivedCodec(t, remotePc, "VP9")
+ ]);
+}, "PeerConnection passthrough MediaRecorder should be prepared to handle " +
+ "the codec switching from VP8 to VP9");
+
+</script>
+</body>
+
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/support/MediaRecorder-iframe.html b/testing/web-platform/tests/mediacapture-record/support/MediaRecorder-iframe.html
new file mode 100644
index 0000000000..df60c4e8e1
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/support/MediaRecorder-iframe.html
@@ -0,0 +1,20 @@
+<!DOCTYPE html>
+<title>Start a MediaRecorder</title>
+<html>
+<body>
+<script src="../utils/sources.js"></script>
+<script>
+ var context;
+ var recorder;
+ var video;
+ var control;
+
+ function prepareForTest(test) {
+ const obj = createVideoStream(test);
+ video = obj.stream;
+ control = obj.control;
+ recorder = new MediaRecorder(video);
+ }
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-record/utils/peerconnection.js b/testing/web-platform/tests/mediacapture-record/utils/peerconnection.js
new file mode 100644
index 0000000000..26a925abf0
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/utils/peerconnection.js
@@ -0,0 +1,141 @@
+/**
+ * @fileoverview Utility functions for tests utilizing PeerConnections
+ */
+
+/**
+ * Exchanges offers and answers between two peer connections.
+ *
+ * pc1's offer is set as the local description in pc1 and as the
+ * remote description in pc2. After that, pc2's answer is set as
+ * its local description in pc2 and as the remote description in pc1.
+ *
+ * @param {!RTCPeerConnection} pc1 The first peer connection.
+ * @param {!RTCPeerConnection} pc2 The second peer connection.
+ */
+async function exchangeOfferAnswer(pc1, pc2) {
+ await pc1.setLocalDescription(await pc1.createOffer());
+ await pc2.setRemoteDescription(pc1.localDescription);
+ await pc2.setLocalDescription(await pc2.createAnswer());
+ await pc1.setRemoteDescription(pc2.localDescription);
+}
+
+/**
+ * Sets the specified codec preference on the transceiver if it's included in
+ * the sender's list of supported video codecs.
+ * @param {!RTCRtpTransceiver} transceiver The RTP transceiver.
+ * @param {string} codecPreference The codec preference.
+ */
+function setTransceiverCodecPreference(transceiver, codecPreference) {
+ for (const codec of RTCRtpSender.getCapabilities('video').codecs) {
+ if (codec.mimeType.includes(codecPreference)) {
+ transceiver.setCodecPreferences([codec]);
+ return;
+ }
+ }
+}
+
+/**
+ * Starts a connection between two peer connections, using an audio and/or video
+ * stream.
+ * @param {*} t Test instance.
+ * @param {boolean} audio True if audio should be used.
+ * @param {boolean} video True if video should be used.
+ * @param {string} [videoCodecPreference] String containing the codec preference.
+ * @returns an array with the two connected peer connections, the remote stream,
+ * and an object containing transceivers by kind.
+ */
+async function startConnection(t, audio, video, videoCodecPreference) {
+ const scope = [];
+ if (audio) scope.push("microphone");
+ if (video) scope.push("camera");
+ await setMediaPermission("granted", scope);
+ const stream = await navigator.mediaDevices.getUserMedia({audio, video});
+ t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
+ const pc1 = new RTCPeerConnection();
+ t.add_cleanup(() => pc1.close());
+ const pc2 = new RTCPeerConnection();
+ t.add_cleanup(() => pc2.close());
+ const transceivers = {};
+ for (const track of stream.getTracks()) {
+ const transceiver = pc1.addTransceiver(track, {streams: [stream]});
+ transceivers[track.kind] = transceiver;
+ if (videoCodecPreference && track.kind == 'video') {
+ setTransceiverCodecPreference(transceiver, videoCodecPreference);
+ }
+ }
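+  // Wire up trickle ICE in both directions, ignoring candidates that arrive
+  // after the other side has closed.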
+ for (const [local, remote] of [[pc1, pc2], [pc2, pc1]]) {
+ local.addEventListener('icecandidate', ({candidate}) => {
+ if (!candidate || remote.signalingState == 'closed') return;
+ remote.addIceCandidate(candidate);
+ });
+ }
+ const haveTrackEvent = new Promise(r => pc2.ontrack = r);
+ await exchangeOfferAnswer(pc1, pc2);
+ const {streams} = await haveTrackEvent;
+ return [pc1, pc2, streams[0], transceivers];
+}
+
+/**
+ * Given a peer connection, return after at least numFramesOrPackets
+ * frames (video) or packets (audio) have been received.
+ * @param {*} t Test instance.
+ * @param {!RTCPeerConnection} pc The peer connection.
+ * @param {boolean} lookForAudio True if audio packets should be waited for.
+ * @param {boolean} lookForVideo True if video packets should be waited for.
+ * @param {number} numFramesOrPackets Number of frames (video) and packets (audio)
+ * to wait for.
+ */
+async function waitForReceivedFramesOrPackets(
+ t, pc, lookForAudio, lookForVideo, numFramesOrPackets) {
+ let initialAudioPackets = 0;
+ let initialVideoFrames = 0;
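+  // Poll getStats() every 100 ms until the inbound-rtp counters have advanced
+  // past the initial values by the requested amount.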
+ while (lookForAudio || lookForVideo) {
+ const report = await pc.getStats();
+ for (const stats of report.values()) {
+ if (stats.type == 'inbound-rtp') {
+ if (lookForAudio && stats.kind == 'audio') {
+ if (!initialAudioPackets) {
+ initialAudioPackets = stats.packetsReceived;
+ } else if (stats.packetsReceived > initialAudioPackets +
+ numFramesOrPackets) {
+ lookForAudio = false;
+ }
+ }
+ if (lookForVideo && stats.kind == 'video') {
+ if (!initialVideoFrames) {
+ initialVideoFrames = stats.framesDecoded;
+ } else if (stats.framesDecoded > initialVideoFrames +
+ numFramesOrPackets) {
+ lookForVideo = false;
+ }
+ }
+ }
+ }
+ await new Promise(r => t.step_timeout(r, 100));
+ }
+}
+
+/**
+ * Given a peer connection, return after one of its inbound RTP connections
+ * includes use of the specified codec.
+ * @param {*} t Test instance.
+ * @param {!RTCPeerConnection} pc The peer connection.
+ * @param {string} codecToLookFor The waited-for codec.
+ */
+async function waitForReceivedCodec(t, pc, codecToLookFor) {
+ let currentCodecId;
+ for (;;) {
+ const report = await pc.getStats();
+ for (const stats of report.values()) {
+ if (stats.type == 'inbound-rtp' && stats.kind == 'video') {
+ if (stats.codecId) {
+ if (report.get(stats.codecId).mimeType.toLowerCase()
+ .includes(codecToLookFor.toLowerCase())) {
+ return;
+ }
+ }
+ }
+ }
+ await new Promise(r => t.step_timeout(r, 100));
+ }
+}
diff --git a/testing/web-platform/tests/mediacapture-record/utils/sources.js b/testing/web-platform/tests/mediacapture-record/utils/sources.js
new file mode 100644
index 0000000000..44947272d6
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-record/utils/sources.js
@@ -0,0 +1,75 @@
+function createAudioStream(t) {
+ const ac = new AudioContext();
+ const { stream } = ac.createMediaStreamDestination();
+ const [track] = stream.getTracks();
+ t.add_cleanup(() => {
+ ac.close();
+ track.stop();
+ });
+ return { stream };
+}
+
+function createFlowingAudioStream(t) {
+ const ac = new AudioContext();
+ const dest = ac.createMediaStreamDestination();
+ const osc = ac.createOscillator();
+ osc.connect(dest);
+ osc.start();
+ const [track] = dest.stream.getTracks();
+ t.add_cleanup(() => {
+ ac.close();
+ track.stop();
+ });
+ return { stream: dest.stream };
+}
+
+function createVideoStream(t) {
+ const canvas = document.createElement("canvas");
+ canvas.id = "canvas";
+ document.body.appendChild(canvas);
+ const ctx = canvas.getContext("2d");
+ const stream = canvas.captureStream();
+ const [track] = stream.getTracks();
+ t.add_cleanup(() => {
+ document.body.removeChild(canvas);
+ track.stop();
+ });
+ const addVideoFrame = () => {
+ ctx.fillStyle = "red";
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
+ };
+ return { stream, control: { addVideoFrame } };
+}
+
+function createFlowingVideoStream(t) {
+ const { stream } = createVideoStream(t);
+ const [track] = stream.getTracks();
+ const canvas = document.getElementById("canvas");
+ const ctx = canvas.getContext("2d");
+ ctx.fillStyle = "green";
+ requestAnimationFrame(function draw() {
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
+ if (track.readyState == "live") {
+ requestAnimationFrame(draw);
+ }
+ });
+ return { stream };
+}
+
+function createAudioVideoStream(t) {
+ const { stream: audio } = createAudioStream(t);
+ const { stream: video, control } = createVideoStream(t);
+ return {
+ stream: new MediaStream([...audio.getTracks(), ...video.getTracks()]),
+ control,
+ };
+}
+
+function createFlowingAudioVideoStream(t) {
+ return {
+ stream: new MediaStream([
+ ...createFlowingAudioStream(t).stream.getTracks(),
+ ...createFlowingVideoStream(t).stream.getTracks(),
+ ]),
+ };
+}
diff --git a/testing/web-platform/tests/mediacapture-region/CropTarget-fromElement.https.html b/testing/web-platform/tests/mediacapture-region/CropTarget-fromElement.https.html
new file mode 100644
index 0000000000..bc1847e481
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-region/CropTarget-fromElement.https.html
@@ -0,0 +1,92 @@
+<!doctype html>
+<html>
+
+<head>
+ <title>Test CropTarget.fromElement()</title>
+ <meta name='assert' content='Test CropTarget.fromElement().' />
+</head>
+
+<body>
+ <h1 class="instructions">Description</h1>
+ <p class="instructions">
+ This test checks for the behavior of <code>CropTarget.fromElement()</code>.
+ </p>
+
+ <div id='test-div'></div>
+ <iframe id='test-iframe' src="about:blank"></iframe>
+ <img id='test-img' alt='Alt text' width="500" height="600">
+ <div id='log'></div>
+
+ <script src=/resources/testharness.js></script>
+ <script src=/resources/testharnessreport.js></script>
+
+ <script>
+ "use strict";
+
+ promise_test(async () => {
+ assert_true(!!CropTarget.fromElement);
+ const crop_target = await CropTarget.fromElement(
+ document.getElementById('test-iframe'));
+ assert_equals(crop_target.constructor.name, 'CropTarget');
+ }, "Produces a CropTarget for Elements of subtype iframe.");
+
+ promise_test(async () => {
+ assert_true(!!CropTarget.fromElement);
+ const crop_target = await CropTarget.fromElement(
+ document.getElementById('test-div'));
+ assert_equals(crop_target.constructor.name, 'CropTarget');
+ }, "Produces a CropTarget for Elements of subtype div.");
+
+ // TODO(crbug.com/1247761): Re-enable after rolling out the
+ // experiment to allow any Element.
+ // promise_test(function (t) {
+ // assert_true(!!CropTarget.fromElement);
+ //
+ // return promise_rejects_dom(t, "NotSupportedError",
+ // CropTarget.fromElement(document.getElementById("test-img")));
+ // }, "Produces a CropTarget for Elements of subtype img.");
+
+ promise_test(t => {
+ assert_true(!!CropTarget.fromElement);
+ return promise_rejects_js(t, TypeError,
+ CropTarget.fromElement(undefined));
+ }, "Rejects undefined with a TypeError.");
+
+ promise_test(t => {
+ assert_true(!!CropTarget.fromElement);
+ return promise_rejects_js(t, TypeError, CropTarget.fromElement(123));
+ }, "Rejects a non-Element with a TypeError.");
+
+ promise_test(async () => {
+ assert_true(!!CropTarget.fromElement);
+
+ const div_crop_target = await CropTarget.fromElement(
+ document.getElementById('test-div'));
+ assert_equals(div_crop_target.constructor.name, 'CropTarget');
+
+ const iframe_crop_target = await CropTarget.fromElement(
+ document.getElementById('test-iframe'));
+ assert_equals(iframe_crop_target.constructor.name, 'CropTarget');
+
+ assert_not_equals(div_crop_target, iframe_crop_target);
+ }, "Distinct Elements produce distinct CropTargets.");
+
+ promise_test(async () => {
+ assert_true(!!CropTarget.fromElement);
+
+ const div = document.getElementById('test-div');
+ const div_crop_target = await CropTarget.fromElement(div);
+ assert_equals(div_crop_target.constructor.name, 'CropTarget');
+
+ const clone = div.cloneNode(true);
+ document.querySelector('body').appendChild(clone);
+ const clone_crop_target = await CropTarget.fromElement(clone);
+ assert_equals(clone_crop_target.constructor.name, 'CropTarget');
+
+ assert_not_equals(div_crop_target, clone_crop_target);
+ }, "Cloned Elements produce distinct CropTargets.");
+
+ </script>
+</body>
+
+</html> \ No newline at end of file
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-api.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-api.https.html
new file mode 100644
index 0000000000..1483170176
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-api.https.html
@@ -0,0 +1,22 @@
+<!doctype html>
+<html>
+<head>
+<title>getUserMedia: test that getUserMedia is present</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#navigatorusermedia">
+<meta name='assert' content='Check that the getUserMedia() method is present.'/>
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks for the presence of the
+<code>navigator.mediaDevices.getUserMedia</code> method.</p>
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+test(function () {
+ assert_true(undefined !== navigator.mediaDevices && undefined !== navigator.mediaDevices.getUserMedia, "navigator.mediaDevices.getUserMedia exists");
+}, "mediaDevices.getUserMedia() is present on navigator");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-deny.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-deny.https.html
new file mode 100644
index 0000000000..2042b038b6
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-deny.https.html
@@ -0,0 +1,35 @@
+<!doctype html>
+<html>
+<head>
+ <title>getUserMedia() triggers error callback when auth is denied</title>
+ <link rel="author" title="Dr. A. Gouaillard" href="mailto:agouaillard@gmail.com"/>
+ <link rel="help" href="https://w3c.github.io/mediacapture-main/#dom-mediadevices-getusermedia">
+</head>
+<body>
+ <p class="instructions">When prompted, <strong>please deny</strong> access to
+ the video stream.</p>
+ <h1 class="instructions">Description</h1>
+ <p class="instructions">This test checks that the error callback is triggered
+ when user denies access to the video stream.</p>
+ <div id='log'></div>
+<script src=/resources/testharness.js></script>
+ <script src=/resources/testharnessreport.js></script>
+ <script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+ <script>
+promise_test(async () => {
+ try {
+ await setMediaPermission('denied', ['camera']);
+ await navigator.mediaDevices.getUserMedia({video: true})
+ } catch (error) {
+ assert_throws_dom("NotAllowedError", () => { throw error });
+ assert_false('constraintName' in error,
+ "constraintName attribute not set as expected");
+ return;
+ };
+ assert_unreached("The success callback should not be triggered since access is to be denied");
+}, "Tests that the error callback is triggered when permission is denied");
+ </script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-empty-option-param.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-empty-option-param.https.html
new file mode 100644
index 0000000000..5c6b3785a3
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-empty-option-param.https.html
@@ -0,0 +1,34 @@
+<!doctype html>
+<html>
+<head>
+<title>getUserMedia({}) rejects with TypeError</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#dom-mediadevices-getusermedia">
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that getUserMedia with no value in the
+options parameter raises a TypeError exception.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+promise_test(async () => {
+ try {
+ // Race a settled promise to check that the returned promise is already
+ // rejected.
+ await Promise.race([navigator.mediaDevices.getUserMedia({}),
+ Promise.resolve()]);
+ } catch (error) {
+ assert_throws_js(TypeError, () => { throw error });
+ assert_false('constraintName' in error,
+ "constraintName attribute not set as expected");
+ return;
+ }
+ assert_unreached("should have returned an already-rejected promise.");
+}, "Tests that getUserMedia is rejected with a TypeError when used with an empty options parameter");
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-impossible-constraint.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-impossible-constraint.https.html
new file mode 100644
index 0000000000..c65b2860a2
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-impossible-constraint.https.html
@@ -0,0 +1,37 @@
+<!doctype html>
+<html>
+<head>
+<title>Impossible mandatory constraint in getUserMedia</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-NavigatorUserMedia-getUserMedia-void-MediaStreamConstraints-constraints-NavigatorUserMediaSuccessCallback-successCallback-NavigatorUserMediaErrorCallback-errorCallback">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#idl-def-NavigatorUserMediaError">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that setting an impossible mandatory
+constraint (width &gt;=1G) in getUserMedia works</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission("granted", ["camera"]);
+ // Note - integer conversion is weird for +inf and numbers > 2^32, so we
+ // use a number less than 2^32 for testing.
+ try {
+ await navigator.mediaDevices.getUserMedia({video: {width: {min:100000000}}});
+ assert_unreached("a Video stream of width 100M cannot be created");
+
+ } catch (error) {
+ assert_equals(error.name, "OverconstrainedError", "An impossible constraint triggers a OverconstrainedError");
+ assert_equals(error.constraint, "width", "The name of the not satisfied error is given in error.constraint");
+ }
+}, "Tests that setting an impossible constraint in getUserMedia fails");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-invalid-facing-mode.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-invalid-facing-mode.https.html
new file mode 100644
index 0000000000..8da6c9754e
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-invalid-facing-mode.https.html
@@ -0,0 +1,31 @@
+<!doctype html>
+<html>
+<head>
+<title>Invalid facingMode in getUserMedia</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#def-constraint-facingMode">
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that trying to set an empty facingMode
+ value in getUserMedia results in an OverconstrainedError.
+</p>
+
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission("granted", ["camera"]);
+ try {
+ await navigator.mediaDevices.getUserMedia({video: {facingMode: {exact: ''}}});
+ assert_unreached("The empty string is not a valid facingMode");
+ } catch (error) {
+ assert_equals(error.name, "OverconstrainedError");
+ assert_equals(error.constraint, "facingMode");
+ };
+}, "Tests that setting an invalid facingMode constraint in getUserMedia fails");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-non-applicable-constraint.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-non-applicable-constraint.https.html
new file mode 100644
index 0000000000..3e9481bfa4
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-non-applicable-constraint.https.html
@@ -0,0 +1,77 @@
+<!doctype html>
+<title>non-applicable constraint in getUserMedia</title>
+<link rel="author" title="Intel" href="http://www.intel.com"/>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#methods-5">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+ <script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+
+<p class="instructions">When prompted, accept to share your audio and video stream.</p>
+
+<script>
+
+let video_only_valid_constraints = {
+ width: {min: 0},
+ height: {min: 0},
+ frameRate: {min: 0},
+ aspectRatio: {min: 0},
+ facingMode: {ideal: 'environment'},
+ resizeMode: {ideal: 'none'}
+}
+
+let video_only_invalid_constraints = {
+ width: {min: 100000000},
+ height: {min: 100000000},
+ frameRate: {min: 100000000},
+ aspectRatio: {min: 100000000},
+ facingMode: {exact: 'invalid'},
+ resizeMode: {exact: 'invalid'}
+}
+
+let audio_only_valid_constraints = {
+ volume: {min: 0},
+ sampleRate: {min: 0},
+ sampleSize: {min: 0},
+ echoCancellation: {ideal: true},
+ autoGainControl: {ideal: true},
+ noiseSuppression: {ideal: true},
+ latency: {min: 0},
+ channelCount: {min: 0}
+}
+
+let audio_only_invalid_constraints = {
+ volume: {min: 2},
+ sampleRate: {min: 100000000},
+ sampleSize: {min: 100000000},
+ echoCancellation: {exact: true},
+ autoGainControl: {exact: true},
+ noiseSuppression: {exact: true},
+ latency: {max: 0},
+ channelCount: {max: 0}
+}
+
+promise_test(async () => {
+  // Both permissions are needed at some point, so ask for both at once.
+ await setMediaPermission();
+ let stream = await navigator.mediaDevices.getUserMedia({audio: video_only_valid_constraints})
+ assert_equals(stream.getAudioTracks().length, 1, "the media stream has exactly one audio track");
+}, 'Test that setting video-only valid constraints inside of "audio" is simply ignored');
+
+promise_test(async () => {
+ let stream = await navigator.mediaDevices.getUserMedia({audio: video_only_invalid_constraints})
+ assert_equals(stream.getAudioTracks().length, 1, "the media stream has exactly one audio track");
+}, 'Test that setting video-only invalid constraints inside of "audio" is simply ignored');
+
+promise_test(async () => {
+ let stream = await navigator.mediaDevices.getUserMedia({video: audio_only_valid_constraints})
+ assert_equals(stream.getVideoTracks().length, 1, "the media stream has exactly one video track");
+}, 'Test that setting audio-only valid constraints inside of "video" is simply ignored');
+
+promise_test(async () => {
+ let stream = await navigator.mediaDevices.getUserMedia({video: audio_only_invalid_constraints})
+ assert_equals(stream.getVideoTracks().length, 1, "the media stream has exactly one video track");
+}, 'Test that setting audio-only invalid constraints inside of "video" is simply ignored');
+
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-optional-constraint.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-optional-constraint.https.html
new file mode 100644
index 0000000000..e8892a5916
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-optional-constraint.https.html
@@ -0,0 +1,32 @@
+<!doctype html>
+<html>
+<head>
+<title>Optional constraint recognized as optional in getUserMedia</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-NavigatorUserMedia-getUserMedia-void-MediaStreamConstraints-constraints-NavigatorUserMediaSuccessCallback-successCallback-NavigatorUserMediaErrorCallback-errorCallback">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that setting an optional constraint in
+getUserMedia is handled as optional</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission("granted", ["camera"]);
+ try {
+ const stream = await navigator.mediaDevices.getUserMedia({video: {advanced: [{width: {min:1024, max: 800}}]}});
+ assert_equals(stream.getVideoTracks().length, 1, "the media stream has exactly one video track");
+ } catch (error) {
+ assert_unreached("an optional constraint can't stop us from obtaining a video stream");
+ }
+}, "Tests that setting an optional constraint in getUserMedia is handled as optional");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-required-constraint-with-ideal-value.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-required-constraint-with-ideal-value.https.html
new file mode 100644
index 0000000000..7f234c5c74
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-required-constraint-with-ideal-value.https.html
@@ -0,0 +1,33 @@
+<!doctype html>
+<html>
+<head>
+<title>Ideal value in required constraint in getUserMedia</title>
+<link rel="author" title="Intel" href="http://www.intel.com"/>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#dfn-fitness-distance">
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that setting a required constraint
+with an ideal value in getUserMedia works</p>
+<div id='log'></div>
+<script>
+promise_test(async t => {
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getUserMedia({video: {width: {ideal: 320, min: 160}}});
+ assert_equals(stream.getVideoTracks().length, 1, "the media stream has exactly one video track");
+ assert_equals(stream.getVideoTracks()[0].getSettings().width, 320, 'ideal width is selected for getUserMedia() video tracks');
+ const video = document.createElement('video');
+ video.srcObject = stream;
+ await video.play();
+  assert_equals(video.videoWidth, 320, 'video width equals the track width');
+ stream.getVideoTracks()[0].stop();
+}, "Tests that setting a required constraint with an ideal value in getUserMedia works");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-trivial-constraint.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-trivial-constraint.https.html
new file mode 100644
index 0000000000..66dd3a23c8
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-trivial-constraint.https.html
@@ -0,0 +1,32 @@
+<!doctype html>
+<html>
+<head>
+<title>Trivial mandatory constraint in getUserMedia</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-NavigatorUserMedia-getUserMedia-void-MediaStreamConstraints-constraints-NavigatorUserMediaSuccessCallback-successCallback-NavigatorUserMediaErrorCallback-errorCallback">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that setting a trivial mandatory
+constraint (width &gt;=0) in getUserMedia works</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission();
+ try {
+ const stream = await navigator.mediaDevices.getUserMedia({video: {width: {min:0}}})
+ assert_equals(stream.getVideoTracks().length, 1, "the media stream has exactly one video track");
+ } catch (error) {
+ assert_unreached("a Video stream of minimally zero width can always be created");
+ }
+}, "Tests that setting a trivial mandatory constraint in getUserMedia works");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/GUM-unknownkey-option-param.https.html b/testing/web-platform/tests/mediacapture-streams/GUM-unknownkey-option-param.https.html
new file mode 100644
index 0000000000..edd727d5e3
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/GUM-unknownkey-option-param.https.html
@@ -0,0 +1,31 @@
+<!doctype html>
+<html>
+<head>
+<title>getUserMedia({doesnotexist:true}) rejects with TypeError</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-NavigatorUserMedia-getUserMedia-void-MediaStreamConstraints-constraints-NavigatorUserMediaSuccessCallback-successCallback-NavigatorUserMediaErrorCallback-errorCallback">
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that getUserMedia with an unknown value
+in the constraints parameter rejects with a TypeError.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ try {
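+ // An unrecognized dictionary member is ignored, leaving a constraint set that
+ // requests neither audio nor video; getUserMedia() must then reject with a
+ // TypeError.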
+ await navigator.mediaDevices.getUserMedia({doesnotexist:true})
+ assert_unreached("This should never be triggered since the constraints parameter only contains an unrecognized constraint");
+ } catch (error) {
+ assert_equals(error.name, "TypeError", "TypeError returned as expected");
+ assert_equals(error.constraintName, undefined, "constraintName attribute not set as expected");
+ }
+}, "Tests that getUserMedia is rejected with a TypeError when used with an unknown constraint");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/META.yml b/testing/web-platform/tests/mediacapture-streams/META.yml
new file mode 100644
index 0000000000..97363cf368
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/META.yml
@@ -0,0 +1,5 @@
+spec: https://w3c.github.io/mediacapture-main/
+suggested_reviewers:
+ - alvestrand
+ - youennf
+ - jan-ivar
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-SecureContext.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-SecureContext.html
new file mode 100644
index 0000000000..bada628176
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-SecureContext.html
@@ -0,0 +1,19 @@
+<!doctype html>
+<html>
+<head>
+<title>MediaDevices and SecureContext</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+</head>
+<body>
+<script>
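+// MediaDevices, MediaDeviceInfo and getUserMedia are [SecureContext]-gated, so
+// none of them should be exposed in this plain-HTTP page.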
+test(function() {
+ assert_false(window.isSecureContext, "This test must be run in a non secure context");
+ assert_false('MediaDevices' in window, "MediaDevices is not exposed");
+ assert_false('MediaDeviceInfo' in window, "MediaDeviceInfo is not exposed");
+ assert_false('getUserMedia' in navigator, "getUserMedia is not exposed");
+ assert_false('mediaDevices' in navigator, "mediaDevices is not exposed");
+});
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-after-discard.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-after-discard.https.html
new file mode 100644
index 0000000000..ef4f492964
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-after-discard.https.html
@@ -0,0 +1,64 @@
+<!doctype html>
+<title>Test promises from MediaDevices methods in a discarded browsing
+ context</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<body></body>
+<script>
+let devices;
+let child_DOMException;
+setup(() => {
+ const frame = document.createElement('iframe');
+ document.body.appendChild(frame);
+ devices = frame.contentWindow.navigator.mediaDevices;
+ child_DOMException = frame.contentWindow.DOMException;
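+ // Removing the iframe discards its browsing context; its document is no
+ // longer fully active when the methods below run.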
+ frame.remove();
+});
+
+// https://w3c.github.io/mediacapture-main/#dom-mediadevices-getusermedia
+// If the current settings object's responsible document is NOT fully active,
+// return a promise rejected with a DOMException object whose name attribute
+// has the value "InvalidStateError".
+promise_test(async () => {
+ await setMediaPermission("granted", ["microphone"]);
+ // `catch()` is used rather than static Promise methods because microtasks
+ // for `PromiseResolve()` do not run when Promises in inactive Documents are
+ // involved. Whether microtasks for `catch()` run depends on the realm of
+ // the handler rather than the realm of the Promise.
+ // See https://github.com/whatwg/html/issues/5319.
+ let promise_already_rejected = false;
+ let rejected_reason;
+ devices.getUserMedia({audio:true}).catch(reason => {
+ promise_already_rejected = true;
+ rejected_reason = reason;
+ });
+ // Race a settled promise to check that the returned promise is already
+ // rejected.
+ await Promise.reject().catch(() => {
+ assert_true(promise_already_rejected,
+ 'should have returned an already-rejected promise.');
+ assert_throws_dom('InvalidStateError', child_DOMException,
+ () => { throw rejected_reason });
+ });
+}, 'getUserMedia() in a discarded browsing context');
+
+// https://w3c.github.io/mediacapture-main/#dom-mediadevices-enumeratedevices
+// https://w3c.github.io/mediacapture-main/#device-enumeration-can-proceed
+// Device enumeration can proceed steps return false when device enumeration
+// can be exposed is true and the document is not fully active.
+promise_test(async () => {
+ let promise_state = 'pending';
+ // `then()` is used to avoid methods that apply `PromiseResolve()` to
+ // Promises from inactive realms, which would lead to microtasks that don't
+ // run.
+ devices.enumerateDevices().then(() => promise_state = 'resolved',
+ () => promise_state = 'rejected');
+ // Enumerate in the parent document to provide enough time to check that the
+ // Promise from the inactive document does not settle.
+ await navigator.mediaDevices.enumerateDevices();
+ assert_equals(promise_state, 'pending', 'Promise state');
+}, 'enumerateDevices() in a discarded browsing context');
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-camera.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-camera.https.html
new file mode 100644
index 0000000000..1bb086dadd
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-camera.https.html
@@ -0,0 +1,30 @@
+<!doctype html>
+<html>
+<head>
+<title>enumerateDevices: test that enumerateDevices does not expose camera devices when camera access is not allowed</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#dom-mediadevices-enumeratedevices">
+<meta name='assert' content='Check that the enumerateDevices() method does not expose camera devices.'/>
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks for the presence of camera in
+<code>navigator.mediaDevices.enumerateDevices()</code> method.</p>
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+"use strict";
+promise_test(async () => {
+ assert_not_equals(navigator.mediaDevices.enumerateDevices, undefined, "navigator.mediaDevices.enumerateDevices exists");
+ const deviceList = await navigator.mediaDevices.enumerateDevices();
+ for (const mediaInfo of deviceList) {
+ assert_not_equals(mediaInfo.deviceId, undefined, "mediaInfo's deviceId should exist.");
+ assert_not_equals(mediaInfo.kind, undefined, "mediaInfo's kind should exist.");
+ assert_not_equals(mediaInfo.label, undefined, "mediaInfo's label should exist.");
+ assert_not_equals(mediaInfo.groupId, undefined, "mediaInfo's groupId should exist.");
+ assert_in_array(mediaInfo.kind, ["audioinput", "audiooutput"]);
+ }
+}, "Camera is not exposed in mediaDevices.enumerateDevices()");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-camera.https.html.headers b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-camera.https.html.headers
new file mode 100644
index 0000000000..2adc5e237f
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-camera.https.html.headers
@@ -0,0 +1 @@
+Feature-Policy: camera 'none'
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-mic.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-mic.https.html
new file mode 100644
index 0000000000..8d535ce551
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-mic.https.html
@@ -0,0 +1,30 @@
+<!doctype html>
+<html>
+<head>
+<title>enumerateDevices: test that enumerateDevices does not expose microphone devices when microphone access is not allowed</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#dom-mediadevices-enumeratedevices">
+<meta name='assert' content='Check that the enumerateDevices() method does not expose microphone devices.'/>
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks for the presence of microphone in
+<code>navigator.mediaDevices.enumerateDevices()</code> method.</p>
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+"use strict";
+promise_test(async () => {
+ assert_not_equals(navigator.mediaDevices.enumerateDevices, undefined, "navigator.mediaDevices.enumerateDevices exists");
+ const deviceList = await navigator.mediaDevices.enumerateDevices();
+ for (const mediaInfo of deviceList) {
+ assert_not_equals(mediaInfo.deviceId, undefined, "mediaInfo's deviceId should exist.");
+ assert_not_equals(mediaInfo.kind, undefined, "mediaInfo's kind should exist.");
+ assert_not_equals(mediaInfo.label, undefined, "mediaInfo's label should exist.");
+ assert_not_equals(mediaInfo.groupId, undefined, "mediaInfo's groupId should exist.");
+ assert_in_array(mediaInfo.kind, ["videoinput", "audiooutput"]);
+ }
+}, "Microphone is not exposed in mediaDevices.enumerateDevices()");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-mic.https.html.headers b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-mic.https.html.headers
new file mode 100644
index 0000000000..a86e0a0778
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-not-allowed-mic.https.html.headers
@@ -0,0 +1 @@
+Feature-Policy: microphone 'none'
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-per-origin-ids.sub.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-per-origin-ids.sub.https.html
new file mode 100644
index 0000000000..714355f5c1
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-per-origin-ids.sub.https.html
@@ -0,0 +1,87 @@
+<!doctype html>
+<html>
+<head>
+<title>enumerateDevices rotates deviceId across origins and after cookies get cleared</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+</head>
+<body>
+ <iframe allow="camera 'src';microphone 'src'" id=same src="/mediacapture-streams/iframe-enumerate.html"></iframe>
+<iframe allow="camera 'src';microphone 'src'" id=cross src="https://{{hosts[][www1]}}:{{ports[https][0]}}/mediacapture-streams/iframe-enumerate.html"></iframe>
+<script>
+
+ let deviceList;
+
+ promise_test(async t => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({audio : true, video: true});
+ stream.getTracks().forEach(t => t.stop());
+ deviceList = await navigator.mediaDevices.enumerateDevices();
+ const msgWatcher = new EventWatcher(t, window, ['message']);
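+ // iframe-enumerate.html is assumed to call enumerateDevices() when it receives
+ // 'run' and to post its device list back to this window.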
+ frames[0].postMessage('run', '*')
+ const e = await msgWatcher.wait_for('message');
+ const iframeDevices = e.data.devices;
+ assert_equals(deviceList.length, iframeDevices.length, "Same number of devices detected same-origin");
+ for (const device of deviceList) {
+ // Look for the same device in the iframe based on deviceId
+ // "default" can be used across several kinds, so it needs an additional check
+ // but we limit that check to "default" to detect re-use of deviceId across kinds
+ const sameDevice = iframeDevices.find(d => d.deviceId === device.deviceId && (device.deviceId !== "default" || d.kind === device.kind));
+ assert_true(!!sameDevice, "deviceIds stay the same when loaded in same origin");
+ assert_equals(sameDevice.label, device.label, "label matches when deviceId matches");
+ assert_equals(sameDevice.kind, device.kind, "kind matches when deviceId matches");
+ // The group identifier MUST be uniquely generated for each document.
+ assert_not_equals(sameDevice.groupId, device.groupId, "groupId is specific to a document");
+ }
+ // setting a cookie as a way to detect if cookie clearing gets done
+ document.cookie = "test=true";
+ window.localStorage.touched = true;
+ }, "enumerateDevices has stable deviceIds across same-origin iframe");
+
+ promise_test(async t => {
+ const msgWatcher = new EventWatcher(t, window, ['message']);
+ frames[1].postMessage('run', '*')
+ const e = await msgWatcher.wait_for('message');
+ const iframeDevices = e.data.devices;
+ assert_equals(deviceList.length, iframeDevices.length, "Same number of devices detected cross-origin");
+ for (const device of deviceList) {
+ // An identifier can be reused across origins as long as
+ // it is not tied to the user and can be guessed by other means
+ // In practice, "default" is what is used today, so we hardcode it
+ // to be able to detect the general case of non-shared deviceIds
+ if (device.deviceId !== "default") {
+ const sameDevice = iframeDevices.find(d => d.deviceId === device.deviceId);
+ assert_false(!!sameDevice, "deviceIds are not shared across origin");
+ }
+ assert_false(!!iframeDevices.find(d => d.groupId === device.groupId), "groupId is specific to a document");
+ }
+ }, "enumerateDevices rotates deviceId across different-origin iframe");
+
+ promise_test(async t => {
+ const iframe = document.createElement("iframe");
+ iframe.setAttribute("allow", "camera 'src';microphone 'src'");
+ iframe.src = "/mediacapture-streams/iframe-enumerate-cleared.html";
+ document.body.appendChild(iframe);
+ const loadWatcher = new EventWatcher(t, iframe, ['load']);
+ await loadWatcher.wait_for('load');
+ assert_implements_optional(document.cookie === "", "Clear-Site-Data not enabled, can't test clearing deviceId");
+
+ const msgWatcher = new EventWatcher(t, window, ['message']);
+ frames[2].postMessage('run', '*')
+ const e = await msgWatcher.wait_for('message');
+ const iframeDevices = e.data.devices;
+ assert_equals(deviceList.length, iframeDevices.length, "Same number of devices detected after clearing cookies");
+ for (const device of deviceList) {
+ const sameDevice = iframeDevices.find(d => d.deviceId === device.deviceId);
+ assert_false(!!sameDevice, "deviceIds are not kept after clearing site data");
+ assert_false(!!iframeDevices.find(d => d.groupId === device.groupId), "groupId is specific to a document");
+ }
+
+ }, "enumerateDevices rotates deviceId after clearing site data");
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-persistent-permission.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-persistent-permission.https.html
new file mode 100644
index 0000000000..58aacf9856
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-persistent-permission.https.html
@@ -0,0 +1,38 @@
+<!doctype html>
+<html>
+<head>
+<title>enumerateDevices depends only on capture state, not permission state</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+</head>
+<body>
+
+<script>
+ promise_test(async t => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({audio : true, video: true});
+ stream.getTracks().forEach(t => t.stop());
+ // the page loaded below hasn't had capture enabled
+ // so enumerateDevices should not list detailed info yet
+ const iframe = document.createElement("iframe");
+ iframe.setAttribute("allow", "camera 'src';microphone 'src'");
+ iframe.src = "/mediacapture-streams/iframe-enumerate.html";
+ document.body.appendChild(iframe);
+ const loadWatcher = new EventWatcher(t, iframe, ['load']);
+ await loadWatcher.wait_for('load');
+ const msgWatcher = new EventWatcher(t, window, ['message']);
+ frames[0].postMessage('run', '*')
+ const e = await msgWatcher.wait_for('message');
+ const iframeDevices = e.data.devices;
+ const kinds = iframeDevices.map(({kind}) => kind);
+ assert_equals(kinds.length, new Set(kinds).size, "At most one of a kind prior to capture");
+ for (const device of iframeDevices) {
+ assert_equals(device.deviceId, "", "deviceId pre-capture is empty");
+ assert_equals(device.label, "", "label pre-capture is empty");
+ assert_equals(device.groupId, "", "groupId pre-capture is empty");
+ }
+ });
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-returned-objects.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-returned-objects.https.html
new file mode 100644
index 0000000000..2b5687f672
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices-returned-objects.https.html
@@ -0,0 +1,59 @@
+<!doctype html>
+<html>
+<head>
+<title>enumerateDevices is returning new MediaDeviceInfo objects every time</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+</head>
+<body>
+<script>
+function doTest(callGetUserMedia, testName)
+{
+ promise_test(async () => {
+ if (callGetUserMedia) {
+ await setMediaPermission();
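+ // Capturing once in this document is what allows enumerateDevices() to expose
+ // non-empty deviceId, label and groupId values below.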
+ await navigator.mediaDevices.getUserMedia({audio : true, video: true});
+ }
+
+ const deviceList1 = await navigator.mediaDevices.enumerateDevices();
+ const deviceList2 = await navigator.mediaDevices.enumerateDevices();
+
+ assert_equals(deviceList1.length, deviceList2.length);
+ for (let i = 0; i < deviceList1.length; i++) {
+ const device1 = deviceList1[i];
+ const device2 = deviceList2[i];
+ assert_not_equals(device1, device2);
+ assert_equals(device1.deviceId, device2.deviceId, "deviceId");
+ assert_equals(device1.kind, device2.kind, "kind");
+ if (!callGetUserMedia) {
+ /* For camera and microphone devices,
+ if the browsing context did not capture (i.e. getUserMedia() was not called or never resolved successfully),
+ the MediaDeviceInfo object will contain a valid value for kind
+ but empty strings for deviceId, label, and groupId. */
+ assert_equals(device1.deviceId, "", "deviceId is empty before capture");
+ assert_equals(device1.groupId, "", "groupId is empty before capture");
+ assert_equals(device1.label, "", "label is empty before capture");
+ assert_in_array(device1.kind, ["audioinput", "audiooutput", "videoinput"], "kind is set to a valid value before capture");
+ }
+ }
+ /* Additionally, at most one device of each kind
+ will be listed in enumerateDevices() result. */
+ // FIXME: ensure browsers are tested as if they had multiple devices of at least one kind -
+ // this probably needs https://w3c.github.io/mediacapture-automation/ support
+ if (!callGetUserMedia) {
+ const deviceKinds = deviceList1.map(d => d.kind);
+ for (let kind of deviceKinds) {
+ assert_equals(deviceKinds.filter(x => x===kind).length, 1, "At most 1 " + kind + " prior to capture");
+ }
+ }
+ }, testName);
+}
+
+doTest(false, "enumerateDevices returns expected mostly empty objects when device-info permission is not granted");
+doTest(true, "enumerateDevices returns expected objects when device-info permission is granted");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices.https.html
new file mode 100644
index 0000000000..4971f4fc48
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-enumerateDevices.https.html
@@ -0,0 +1,64 @@
+<!doctype html>
+<html>
+<head>
+<title>enumerateDevices: test that enumerateDevices is present</title>
+<link rel="author" title="Dr Alex Gouaillard" href="mailto:agouaillard@gmail.com"/>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#enumerating-devices">
+<meta name='assert' content='Check that the enumerateDevices() method is present.'/>
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks for the presence of the
+<code>navigator.mediaDevices.enumerateDevices()</code> method.</p>
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+"use strict";
+
+// NOTE ALEX: for completeness, a test for the ondevicechange event is missing.
+
+promise_test(async () => {
+ assert_not_equals(navigator.mediaDevices.enumerateDevices, undefined, "navigator.mediaDevices.enumerateDevices exists");
+ const deviceList = await navigator.mediaDevices.enumerateDevices();
+ for (const mediaInfo of deviceList) {
+ assert_not_equals(mediaInfo.kind, undefined, "mediaInfo's kind should exist.");
+ assert_equals(mediaInfo.deviceId, "", "mediaInfo's deviceId should exist and be empty if getUserMedia was never called successfully.");
+ assert_equals(mediaInfo.label, "", "mediaInfo's label should exist and be empty if getUserMedia was never called successfully.");
+ assert_equals(mediaInfo.groupId, "", "mediaInfo's groupId should exist and be empty if getUserMedia was never called successfully.");
+ assert_in_array(mediaInfo.kind, ["videoinput", "audioinput", "audiooutput"]);
+ }
+ assert_less_than_equal(deviceList.filter((item) => { return item.kind == "audioinput"; }).length, 1, "there should be zero or one audio input device ");
+ assert_less_than_equal(deviceList.filter((item) => { return item.kind == "videoinput"; }).length, 1, "there should be zero or one video input device ");
+
+}, "mediaDevices.enumerateDevices() is present and working - before capture");
+
+promise_test(async () => {
+ await setMediaPermission("granted", ["microphone"]);
+ await navigator.mediaDevices.getUserMedia({ audio : true });
+ const deviceList = await navigator.mediaDevices.enumerateDevices();
+ for (const mediaInfo of deviceList) {
+ assert_not_equals(mediaInfo.kind, undefined, "mediaInfo's kind should exist.");
+ assert_not_equals(mediaInfo.deviceId, "", "mediaInfo's deviceId should exist and not be empty.");
+ assert_in_array(mediaInfo.kind, ["videoinput", "audioinput", "audiooutput"]);
+ }
+}, "mediaDevices.enumerateDevices() is present and working - after capture");
+
+promise_test(async () => {
+ const deviceList = await navigator.mediaDevices.enumerateDevices();
+ for (const mediaInfo of deviceList) {
+ if (mediaInfo.kind == "audioinput" || mediaInfo.kind == "videoinput") {
+ assert_true(mediaInfo instanceof InputDeviceInfo);
+ } else if ( mediaInfo.kind == "audiooutput" ) {
+ assert_true(mediaInfo instanceof MediaDeviceInfo);
+ } else {
+ assert_unreached("mediaInfo.kind should be one of 'audioinput', 'videoinput', or 'audiooutput'.")
+ }
+ }
+}, "InputDeviceInfo is supported");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-getSupportedConstraints.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-getSupportedConstraints.https.html
new file mode 100644
index 0000000000..453184a169
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-getSupportedConstraints.https.html
@@ -0,0 +1,48 @@
+<!doctype html>
+<html>
+<head>
+<title>Test navigator.mediaDevices.getSupportedConstraints()</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#enumerating-devices">
+<meta name='assert' content='Test the getSupportedConstraints() method.'/>
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks for the presence of the
+<code>navigator.mediaDevices.getSupportedConstraints()</code> method.</p>
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+"use strict";
+test(() => {
+ assert_inherits(navigator.mediaDevices, "getSupportedConstraints");
+ assert_equals(typeof navigator.mediaDevices.getSupportedConstraints, "function");
+}, "navigator.mediaDevices.getSupportedConstraints exists");
+
+{
+ const properties = [
+ "width",
+ "height",
+ "aspectRatio",
+ "frameRate",
+ "facingMode",
+ "resizeMode",
+ "sampleRate",
+ "sampleSize",
+ "echoCancellation",
+ "autoGainControl",
+ "noiseSuppression",
+ "latency",
+ "channelCount",
+ "deviceId",
+ "groupId"];
+ properties.forEach(property => {
+ test(()=>{
+ const supportedConstraints = navigator.mediaDevices.getSupportedConstraints();
+ assert_true(supportedConstraints[property]);
+ }, property + " is supported");
+ });
+}
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaDevices-getUserMedia.https.html b/testing/web-platform/tests/mediacapture-streams/MediaDevices-getUserMedia.https.html
new file mode 100644
index 0000000000..96399c804f
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaDevices-getUserMedia.https.html
@@ -0,0 +1,126 @@
+<!doctype html>
+<html>
+<head>
+<title>getUserMedia: test that mediaDevices.getUserMedia is present</title>
+<link rel="author" title="Dr Alex Gouaillard" href="mailto:agouaillard@gmail.com"/>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#mediadevices-interface-extensions">
+<meta name='assert' content='Check that the mediaDevices.getUserMedia() method is present.'/>
+</head>
+<body>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks for the presence of the
+<code>navigator.mediaDevices.getUserMedia</code> method.</p>
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+test(function () {
+ assert_not_equals(navigator.mediaDevices.getUserMedia, undefined, "navigator.mediaDevices.getUserMedia exists.");
+ // TODO: do some stuff with it
+ assert_not_equals(navigator.mediaDevices.getSupportedConstraints, undefined, "navigator.mediaDevices.getSupportedConstraints exists.");
+ var list = navigator.mediaDevices.getSupportedConstraints();
+ // TODO: we are supposed to check that all values returned can be used in a constraint ....
+ // NOTE: the current list of attributes that may or may not be here
+ // ... FF for example has many not in that list; should we fail if an attribute is present but not listed in the spec?
+ // list.width
+ // list.height
+ // list.aspectRatio
+ // list.frameRate
+ // list.facingMode
+ // list.volume
+ // list.sampleRate
+ // list.sampleSize
+ // list.echoCancellation
+ // list.latency
+ // list.channelCount
+ // list.deviceId
+ // list.groupId
+ }, "mediaDevices.getUserMedia() is present on navigator");
+
+promise_test(async t => {
+ // Both permissions are needed at some point, asking both at once
+ await setMediaPermission();
+ assert_true(navigator.mediaDevices.getSupportedConstraints()["groupId"],
+ "groupId should be supported");
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ for (const device of devices) {
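+ // For each enumerated groupId, either a videoinput device in that group gets
+ // selected, or the request fails with OverconstrainedError because no
+ // videoinput device shares that groupId.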
+ await navigator.mediaDevices.getUserMedia(
+ {video: {groupId: {exact: device.groupId}}}).then(stream => {
+ const found_device = devices.find(({deviceId}) =>
+ deviceId == stream.getTracks()[0].getSettings().deviceId);
+ assert_not_equals(found_device, undefined);
+ assert_equals(found_device.kind, "videoinput");
+ assert_equals(found_device.groupId, device.groupId);
+ stream.getTracks().forEach(t => t.stop());
+ }, error => {
+ assert_equals(error.name, "OverconstrainedError");
+ assert_equals(error.constraint, "groupId");
+ const found_device = devices.find(element =>
+ element.kind == "videoinput" && element.groupId == device.groupId);
+ assert_equals(found_device, undefined);
+ });
+ }
+}, 'groupId is correctly supported by getUserMedia() for video devices');
+
+promise_test(async t => {
+ assert_true(navigator.mediaDevices.getSupportedConstraints()["groupId"],
+ "groupId should be supported");
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ for (const device of devices) {
+ await navigator.mediaDevices.getUserMedia(
+ {audio: {groupId: {exact: device.groupId}}}).then(stream => {
+ const found_device = devices.find(({deviceId}) =>
+ deviceId == stream.getTracks()[0].getSettings().deviceId);
+ assert_not_equals(found_device, undefined);
+ assert_equals(found_device.kind, "audioinput");
+ assert_equals(found_device.groupId, device.groupId);
+ stream.getTracks().forEach(t => t.stop());
+ }, error => {
+ assert_equals(error.name, "OverconstrainedError");
+ assert_equals(error.constraint, "groupId");
+ const found_device = devices.find(element =>
+ element.kind == "audioinput" && element.groupId == device.groupId);
+ assert_equals(found_device, undefined);
+ });
+ }
+}, 'groupId is correctly supported by getUserMedia() for audio devices');
+
+promise_test(async t => {
+ assert_true(navigator.mediaDevices.getSupportedConstraints()["resizeMode"],
+ "resizeMode should be supported");
+ const stream = await navigator.mediaDevices.getUserMedia(
+ { video: {resizeMode: {exact: 'none'}}});
+ const [track] = stream.getVideoTracks();
+ t.add_cleanup(() => track.stop());
+ assert_equals(track.getSettings().resizeMode, 'none');
+}, 'getUserMedia() supports setting none as resizeMode.');
+
+promise_test(async t => {
+ assert_true(navigator.mediaDevices.getSupportedConstraints()["resizeMode"],
+ "resizeMode should be supported");
+ const stream = await navigator.mediaDevices.getUserMedia(
+ { video: {resizeMode: {exact: 'crop-and-scale'}}});
+ const [track] = stream.getVideoTracks();
+ t.add_cleanup(() => track.stop());
+ assert_equals(track.getSettings().resizeMode, 'crop-and-scale');
+}, 'getUserMedia() supports setting crop-and-scale as resizeMode.');
+
+promise_test(async t => {
+ assert_true(navigator.mediaDevices.getSupportedConstraints()["resizeMode"],
+ "resizeMode should be supported");
+ try {
+ const stream = await navigator.mediaDevices.getUserMedia(
+ { video: {resizeMode: {exact: 'INVALID'}}});
+ t.add_cleanup(() => stream.getVideoTracks()[0].stop());
+ t.unreached_func('getUserMedia() should fail with invalid resizeMode')();
+ } catch (e) {
+ assert_equals(e.name, 'OverconstrainedError');
+ assert_equals(e.constraint, 'resizeMode');
+ }
+}, 'getUserMedia() fails with exact invalid resizeMode.');
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-firstframe.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-firstframe.https.html
new file mode 100644
index 0000000000..36641b58c9
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-firstframe.https.html
@@ -0,0 +1,106 @@
+<!doctype html>
+<html>
+<head>
+<title>Assigning a MediaStream to a media element and not playing it results in rendering a first frame</title>
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that a HTMLMediaElement with an
+assigned MediaStream with a video track fires the appropriate events to reach
+the "canplay" event and readyState HAVE_ENOUGH_DATA even when not playing or
+autoplaying.</p>
+<video id="vid"></video>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+'use strict';
+const vid = document.getElementById("vid");
+
+promise_test(async t => {
+ const wait = ms => new Promise(r => t.step_timeout(r, ms));
+ const timeout = (promise, time, msg) => Promise.race([
+ promise,
+ wait(time).then(() => Promise.reject(new Error(msg)))
+ ]);
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
+ vid.srcObject = stream;
+
+ await timeout(new Promise(r => vid.oncanplay = r), 8000, "canplay timeout");
+ assert_equals(vid.readyState, vid.HAVE_ENOUGH_DATA,
+ "readyState is HAVE_ENOUGH_DATA after \"canplay\"");
+}, "Tests that loading a MediaStream in a media element eventually results in \"canplay\" even when not playing or autoplaying");
+
+promise_test(async t => {
+ const wait = ms => new Promise(r => t.step_timeout(r, ms));
+ const timeout = (promise, time, msg) => Promise.race([
+ promise,
+ wait(time).then(() => Promise.reject(new Error(msg)))
+ ]);
+ const unexpected = e => assert_unreached(`Got unexpected event ${e.type}`);
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.ondurationchange = null;
+ stream.getTracks().forEach(track => track.stop())
+ });
+ vid.srcObject = stream;
+
+ vid.onloadstart = unexpected;
+ vid.ondurationchange = unexpected;
+ vid.onresize = unexpected;
+ vid.onloadedmetadata = unexpected;
+ vid.onloadeddata = unexpected;
+ vid.oncanplay = unexpected;
+ vid.oncanplaythrough = unexpected;
+
+ await timeout(new Promise(r => vid.onloadstart = r), 8000,
+ "loadstart timeout");
+ vid.onloadstart = unexpected;
+
+ await timeout(new Promise(r => vid.ondurationchange = r), 8000,
+ "durationchange timeout");
+ vid.ondurationchange = unexpected;
+ assert_equals(vid.duration, Infinity, "duration changes to Infinity");
+
+ await timeout(new Promise(r => vid.onresize = r), 8000,
+ "resize timeout");
+ vid.onresize = unexpected;
+ assert_not_equals(vid.videoWidth, 0,
+ "videoWidth is something after \"resize\"");
+ assert_not_equals(vid.videoHeight, 0,
+ "videoHeight is something after \"resize\"");
+
+ await timeout(new Promise(r => vid.onloadedmetadata = r), 8000,
+ "loadedmetadata timeout");
+ vid.onloadedmetadata = unexpected;
+ assert_greater_than_equal(vid.readyState, vid.HAVE_METADATA,
+ "readyState is at least HAVE_METADATA after \"loadedmetadata\"");
+
+ await timeout(new Promise(r => vid.onloadeddata = r), 8000,
+ "loadeddata timeout");
+ vid.onloadeddata = unexpected;
+ assert_equals(vid.readyState, vid.HAVE_ENOUGH_DATA,
+ "readyState is HAVE_ENOUGH_DATA after \"loadeddata\" since there's no buffering");
+
+ await timeout(new Promise(r => vid.oncanplay = r), 8000, "canplay timeout");
+ vid.oncanplay = unexpected;
+ assert_equals(vid.readyState, vid.HAVE_ENOUGH_DATA,
+ "readyState is HAVE_ENOUGH_DATA after \"canplay\" since there's no buffering");
+
+ await timeout(new Promise(r => vid.oncanplaythrough = r), 8000,
+ "canplaythrough timeout");
+ vid.oncanplaythrough = unexpected;
+ assert_equals(vid.readyState, vid.HAVE_ENOUGH_DATA,
+ "readyState is HAVE_ENOUGH_DATA after \"canplaythrough\"");
+
+ // Crank the event loop to see whether any more events are fired.
+ await wait(100);
+}, "Tests that loading a MediaStream in a media element sees all the expected (deterministic) events even when not playing or autoplaying");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-preload-none.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-preload-none.https.html
new file mode 100644
index 0000000000..52a13af272
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-preload-none.https.html
@@ -0,0 +1,84 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>Test that the HTMLMediaElement preload 'none' attribute value is ignored for MediaStream used as srcObject and MediaStream object URLs used as src.</title>
+ <link rel="author" title="Matthew Wolenetz" href="mailto:wolenetz@chromium.org"/>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/resources/testdriver.js"></script>
+ <script src="/resources/testdriver-vendor.js"></script>
+ <script src="permission-helper.js"></script>
+ </head>
+ <body>
+ <p class="instructions">When prompted, accept to share your audio and video streams.</p>
+ <p class="instructions">This test checks that the HTMLMediaElement preload 'none' attribute value is ignored for MediaStream used as srcObject and MediaStream object URLs used as src.</p>
+ <div id=log></div>
+
+ <audio preload="none"></audio>
+ <video preload="none"></video>
+
+ <script>
+ async function testPreloadNone(mediaElement, stream)
+ {
+ let rejectSuspendedPromise, rejectErrorPromise, resolveDataLoadedPromise;
+ const suspended = new Promise((r, rej) => {
+ rejectSuspendedPromise = rej;
+ });
+ const errored = new Promise((r, rej) => {
+ rejectErrorPromise = rej;
+ });
+ const loaded = new Promise(resolve => {
+ resolveDataLoadedPromise = resolve;
+ });
+
+ // The optional deferred load steps (for preload none) for MediaStream resources should be skipped.
+ mediaElement.addEventListener("suspend", () => {
+ rejectSuspendedPromise("'suspend' should not be fired.")
+ });
+ mediaElement.addEventListener("error", () => {
+ rejectErrorPromise("'error' should not be fired, code=" + mediaElement.error.code);
+ });
+
+ mediaElement.addEventListener("loadeddata", () => {
+ assert_equals(mediaElement.networkState, mediaElement.NETWORK_LOADING);
+ resolveDataLoadedPromise();
+ });
+
+ mediaElement.srcObject = stream;
+ assert_equals(mediaElement.networkState, mediaElement.NETWORK_NO_SOURCE); // Resource selection is active.
+ try {
+ await Promise.race([suspended, errored, loaded]);
+ } catch (msg) {
+ assert_unreached(msg);
+ }
+ }
+
+ promise_test(async () =>
+ {
+ const aud = document.querySelector("audio");
+ // camera is needed for the next test, asking for both at once
+ await setMediaPermission();
+ let stream;
+ try {
+ stream = await navigator.mediaDevices.getUserMedia({audio:true});
+ } catch (e) {
+ assert_unreached("getUserMedia error callback was invoked.");
+ }
+ await testPreloadNone(aud, stream);
+ }, "Test that preload 'none' is ignored for MediaStream object URL used as srcObject for audio");
+
+ promise_test(async () =>
+ {
+ const vid = document.querySelector("video");
+ let stream;
+ try {
+ stream = await navigator.mediaDevices.getUserMedia({video:true});
+ } catch (e) {
+ assert_unreached("getUserMedia error callback was invoked.")
+ }
+ await testPreloadNone(vid, stream);
+
+ }, "Test that preload 'none' is ignored for MediaStream used as srcObject for video");
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-srcObject.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-srcObject.https.html
new file mode 100644
index 0000000000..be64123058
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-MediaElement-srcObject.https.html
@@ -0,0 +1,476 @@
+<!doctype html>
+<html>
+<head>
+<title>Assigning mediastream to a video element</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#navigatorusermedia">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that the MediaStream object returned by
+the success callback in getUserMedia can be properly assigned to a video element
+via the <code>srcObject</code> attribute.</p>
+
+<audio id="aud"></audio>
+<video id="vid"></video>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+'use strict';
+const vid = document.getElementById("vid");
+
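+// Queue f as a new task: postMessage schedules it on the message task source,
+// so it runs only after the current task and its microtasks have completed.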
+function queueTask(f) {
+ window.onmessage = f;
+ window.postMessage("hi");
+}
+
+promise_test(async t => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+}, "Tests that a MediaStream can be assigned to a video element with srcObject");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+
+ assert_true(!vid.seeking, "A MediaStream is not seekable");
+ assert_equals(vid.seekable.length, 0, "A MediaStream is not seekable");
+}, "Tests that a MediaStream assigned to a video element is not seekable");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+
+ assert_equals(vid.readyState, vid.HAVE_NOTHING,
+ "readyState is HAVE_NOTHING initially");
+ await new Promise(r => vid.onloadeddata = r);
+ assert_equals(vid.readyState, vid.HAVE_ENOUGH_DATA,
+ "Upon having loaded a media stream, the UA sets readyState to HAVE_ENOUGH_DATA");
+}, "Tests that a MediaStream assigned to a video element is in readyState HAVE_NOTHING initially");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+
+ assert_equals(vid.duration, NaN,
+ "A MediaStream does not have any duration initially.");
+ await new Promise(r => vid.ondurationchange = r);
+ assert_equals(vid.duration, Infinity,
+ "A loaded MediaStream does not have a pre-defined duration.");
+
+ vid.play();
+ await new Promise(r => vid.ontimeupdate = r);
+ for (const t of stream.getTracks()) {
+ t.stop();
+ }
+
+ await new Promise(r => vid.ondurationchange = r);
+ assert_equals(vid.duration, vid.currentTime,
+ "After ending playback, duration gets set to currentTime");
+}, "Tests that a MediaStream assigned to a video element has expected duration");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+
+ vid.preload = "metadata";
+ vid.srcObject = stream;
+
+ assert_equals(vid.buffered.length, 0,
+ "A MediaStream cannot be preloaded. Therefore, there are no buffered timeranges");
+ assert_equals(vid.preload, "none", "preload must always be none");
+ vid.preload = "auto";
+ assert_equals(vid.preload, "none", "Setting preload must be ignored");
+
+ await new Promise(r => vid.onloadeddata = r);
+ assert_equals(vid.buffered.length, 0,
+ "A MediaStream cannot be preloaded. Therefore, there are no buffered timeranges");
+
+ vid.srcObject = null;
+
+ assert_equals(vid.preload, "metadata",
+ "The preload attribute returns the value it had before using a MediaStream");
+}, "Tests that a video element with a MediaStream assigned is not preloaded");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+
+ vid.defaultPlaybackRate = 0.3;
+ vid.playbackRate = 0.3;
+ vid.onratechange = t.unreached_func("ratechange event must not be fired");
+ vid.srcObject = stream;
+
+ assert_equals(vid.defaultPlaybackRate, 1, "playback rate is always 1");
+ vid.defaultPlaybackRate = 0.5;
+ assert_equals(vid.defaultPlaybackRate, 1,
+ "Setting defaultPlaybackRate must be ignored");
+
+ assert_equals(vid.playbackRate, 1, "playback rate is always 1");
+ vid.playbackRate = 0.5;
+ assert_equals(vid.playbackRate, 1, "Setting playbackRate must be ignored");
+
+ vid.srcObject = null;
+ assert_equals(vid.defaultPlaybackRate, 0.3,
+ "The defaultPlaybackRate attribute returns the value it had before using a MediaStream");
+ assert_equals(vid.playbackRate, 0.3,
+ "The playbackRate attribute is set to the value of the defaultPlaybackRate attribute when unsetting srcObject");
+
+ // Check that there's no ratechange event
+ await new Promise(r => t.step_timeout(r, 100));
+}, "Tests that a video element with a MediaStream assigned ignores playbackRate attributes (defaultPlaybackRate is identical)");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+
+ vid.defaultPlaybackRate = 0.3;
+ vid.playbackRate = 0.4;
+ vid.onratechange = t.unreached_func("ratechange event must not be fired");
+ vid.srcObject = stream;
+
+ assert_equals(vid.defaultPlaybackRate, 1, "playback rate is always 1");
+ vid.defaultPlaybackRate = 0.5;
+ assert_equals(vid.defaultPlaybackRate, 1,
+ "Setting defaultPlaybackRate must be ignored");
+
+ assert_equals(vid.playbackRate, 1, "playback rate is always 1");
+ vid.playbackRate = 0.5;
+ assert_equals(vid.playbackRate, 1, "Setting playbackRate must be ignored");
+
+ vid.srcObject = null;
+ assert_equals(vid.defaultPlaybackRate, 0.3,
+ "The defaultPlaybackRate attribute returns the value it had before using a MediaStream");
+ assert_equals(vid.playbackRate, 0.3,
+ "The playbackRate attribute is set to the value of the defaultPlaybackRate attribute when unsetting srcObject (and fires ratechange)");
+ await new Promise(r => vid.onratechange = r);
+}, "Tests that a video element with a MediaStream assigned ignores playbackRate attributes (defaultPlaybackRate is different)");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+ await new Promise(r => vid.oncanplay = r);
+ vid.play();
+ await new Promise(r => vid.ontimeupdate = r);
+ assert_greater_than(vid.currentTime, 0,
+ "currentTime is greater than 0 after first timeupdate");
+
+ assert_equals(vid.played.length, 1,
+ "A MediaStream's timeline always consists of a single range");
+ assert_equals(vid.played.start(0), 0,
+ "A MediaStream's timeline always starts at zero");
+ assert_equals(vid.played.end(0), vid.currentTime,
+ "A MediaStream's end MUST return the last known currentTime");
+
+ const time = vid.currentTime;
+ vid.currentTime = 0;
+ assert_equals(vid.currentTime, time,
+ "The UA MUST ignore attempts to set the currentTime attribute");
+}, "Tests that a media element with an assigned MediaStream reports the played attribute as expected");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+
+ assert_equals(vid.currentTime, 0, "The initial value is 0");
+ vid.currentTime = 42;
+ assert_equals(vid.currentTime, 0,
+ "The UA MUST ignore attempts to set the currentTime attribute (default playback start position)");
+
+ await new Promise(r => vid.onloadeddata = r);
+ assert_equals(vid.currentTime, 0, "The initial value is 0");
+ vid.currentTime = 42;
+ assert_equals(vid.currentTime, 0,
+ "The UA MUST ignore attempts to set the currentTime attribute (official playback position)");
+
+ vid.play();
+ await new Promise(r => vid.ontimeupdate = r);
+ assert_greater_than(vid.currentTime, 0,
+ "currentTime is greater than 0 after first timeupdate");
+
+ const lastTime = vid.currentTime;
+ vid.currentTime = 0;
+ assert_equals(vid.currentTime, lastTime,
+ "The UA MUST ignore attempts to set the currentTime attribute (restart)");
+
+ for(const t of stream.getTracks()) {
+ t.stop();
+ }
+ await new Promise(r => vid.onended = r);
+ assert_greater_than_equal(vid.currentTime, lastTime,
+ "currentTime advanced after stopping");
+}, "Tests that a media element with an assigned MediaStream reports the currentTime attribute as expected");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+
+ await new Promise(r => t.step_timeout(r, 500));
+
+ vid.play();
+ await new Promise(r => vid.ontimeupdate = r);
+ assert_between_exclusive(vid.currentTime, 0, 0.5,
+ "currentTime starts at 0 and has progressed at first timeupdate");
+}, "Tests that a media element with an assigned MediaStream starts its timeline at 0 regardless of when the MediaStream was created");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+
+ vid.play();
+ await new Promise(r => vid.ontimeupdate = r);
+
+ vid.pause();
+ const pauseCurrentTime = vid.currentTime;
+
+ await new Promise(r => vid.onpause = r);
+ vid.ontimeupdate = () => assert_unreached("No timeupdate while paused");
+
+ await new Promise(r => t.step_timeout(r, 500));
+ assert_equals(vid.currentTime, pauseCurrentTime,
+ "currentTime does not change while paused");
+
+ vid.play();
+
+ await new Promise(r => vid.ontimeupdate = r);
+ assert_between_exclusive(vid.currentTime - pauseCurrentTime, 0, 0.5,
+ "currentTime does not skip ahead after pause");
+}, "Tests that a media element with an assigned MediaStream does not advance currentTime while paused");
+
+promise_test(async t => {
+ const canvas = document.createElement("canvas");
+ const ctx = canvas.getContext("2d");
+ const stream = canvas.captureStream();
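+ // A canvas capture stream delivers no frames until the canvas is drawn to, so
+ // the element cannot reach HAVE_METADATA yet.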
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ vid.srcObject = stream;
+
+ vid.ontimeupdate = () =>
+ assert_unreached("No timeupdate until potentially playing");
+
+ vid.play();
+
+ await new Promise(r => t.step_timeout(r, 1000));
+ assert_equals(vid.readyState, vid.HAVE_NOTHING,
+ "Video dimensions not known yet");
+
+ const start = performance.now();
+ ctx.fillStyle = "green";
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
+
+ // Wait for, and check, potentially playing
+ await new Promise(r => vid.oncanplay = r);
+ const canplayDuration = (performance.now() - start) / 1000;
+ // "canplay" was just dispatched from a task queued when the element became
+ // potentially playing. currentTime may not have progressed more than the time
+ // it took from becoming potentially playing to starting the
+ // canplay-dispatching task. Though the media clock and the js clock may be
+ // different, so we take double this duration, or 100ms, whichever is greater,
+ // as a safety margin.
+ const margin = Math.max(0.1, canplayDuration * 2);
+ assert_between_inclusive(vid.currentTime, 0, margin,
+ "currentTime has not advanced more than twice it took to dispatch canplay");
+ assert_false(vid.paused, "Media element is not paused");
+ assert_false(vid.ended, "Media element is not ended");
+ assert_equals(vid.error, null,
+ "Media element playback has not stopped due to errors");
+ assert_greater_than(vid.readyState, vid.HAVE_CURRENT_DATA,
+ "Media element playback is not blocked");
+ // Unclear how to check for "paused for user interaction" and "paused for
+ // in-band content".
+
+ await new Promise(r => vid.ontimeupdate = r);
+ assert_between_exclusive(vid.currentTime, 0, 1,
+ "currentTime advances while potentially playing");
+}, "Tests that a media element with an assigned MediaStream does not start advancing currentTime until potentially playing");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => {
+ vid.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ assert_equals(vid.loop, false, "loop is false by default");
+ vid.srcObject = stream;
+
+ vid.loop = true;
+ assert_equals(vid.loop, true,
+ "loop can be changed when assigned a MediaStream");
+
+ await new Promise(r => vid.onloadeddata = r);
+ vid.loop = false;
+ assert_equals(vid.loop, false,
+ "loop can be changed when having loaded a MediaStream");
+
+ vid.play();
+ await new Promise(r => vid.ontimeupdate = r);
+ vid.loop = true;
+ assert_equals(vid.loop, true,
+ "loop can be changed when playing a MediaStream");
+
+ for(const t of stream.getTracks()) {
+ t.stop();
+ }
+ // If loop is ignored, we get "ended",
+ // otherwise the media element sets currentTime to 0 without ending.
+ await new Promise(r => vid.onended = r);
+}, "Tests that the loop attribute has no effect on a media element with an assigned MediaStream");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => { vid.srcObject = null; });
+ vid.srcObject = stream;
+
+ await vid.play();
+
+ for (const track of stream.getTracks()) {
+ track.stop();
+ }
+
+ assert_false(stream.active, "MediaStream becomes inactive with only ended tracks");
+ assert_false(vid.ended, "HTMLMediaElement reports ended the next time the event loop reaches step 1 (sync)");
+
+ await Promise.resolve();
+ assert_false(vid.ended, "HTMLMediaElement reports ended the next time the event loop reaches step 1 (microtask)");
+
+ let ended = false;
+ vid.onended = () => ended = true;
+ await new Promise(r => queueTask(r));
+
+ assert_true(vid.ended, "HTMLMediaElement becomes ended asynchronously when its MediaStream provider becomes inactive");
+ assert_true(ended, "HTMLMediaElement fires the ended event asynchronously when its MediaStream provider becomes inactive");
+}, "Tests that a media element with an assigned MediaStream ends when the MediaStream becomes inactive through tracks ending");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({audio: true, video: true});
+ t.add_cleanup(() => {
+ aud.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ aud.srcObject = stream;
+
+ await aud.play();
+
+ for (const track of stream.getAudioTracks()) {
+ track.stop();
+ }
+
+ assert_true(stream.active, "MediaStream is still active with a live video track");
+ assert_false(aud.ended, "HTMLMediaElement reports ended the next time the event loop reaches step 1 (sync)");
+
+ await Promise.resolve();
+ assert_false(aud.ended, "HTMLMediaElement reports ended the next time the event loop reaches step 1 (microtask)");
+
+ let ended = false;
+ aud.onended = () => ended = true;
+ await new Promise(r => queueTask(r));
+
+ assert_true(aud.ended, "HTMLAudioElement becomes ended asynchronously when its MediaStream provider becomes inaudible");
+ assert_true(ended, "HTMLAudioElement fires the ended event asynchronously when its MediaStream provider becomes inaudible");
+}, "Tests that an audio element with an assigned MediaStream ends when the MediaStream becomes inaudible through audio tracks ending");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => { vid.srcObject = null; });
+ vid.srcObject = stream;
+
+ await vid.play();
+
+ for (const track of stream.getTracks()) {
+ stream.removeTrack(track);
+ }
+
+ assert_false(stream.active, "MediaStream becomes inactive with no tracks");
+ assert_false(vid.ended, "HTMLMediaElement reports ended the next time the event loop reaches step 1 (sync)");
+
+ await Promise.resolve();
+ assert_false(vid.ended, "HTMLMediaElement reports ended the next time the event loop reaches step 1 (microtask)");
+
+ let ended = false;
+ vid.onended = () => ended = true;
+ await new Promise(r => queueTask(r));
+
+ assert_true(vid.ended, "HTMLMediaElement becomes ended asynchronously when its MediaStream provider becomes inactive");
+ assert_true(ended, "HTMLMediaElement fires the ended event asynchronously when its MediaStream provider becomes inactive");
+}, "Tests that a media element with an assigned MediaStream ends when the MediaStream becomes inactive through track removal");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({audio: true, video: true});
+ t.add_cleanup(() => {
+ aud.srcObject = null;
+ stream.getTracks().forEach(track => track.stop());
+ });
+ aud.srcObject = stream;
+
+ await aud.play();
+
+ for (const track of stream.getAudioTracks()) {
+ stream.removeTrack(track);
+ }
+
+ assert_true(stream.active, "MediaStream is still active with a live video track");
+ assert_false(aud.ended, "HTMLMediaElement reports ended the next time the event loop reaches step 1 (sync)");
+
+ await Promise.resolve();
+ assert_false(aud.ended, "HTMLMediaElement reports ended the next time the event loop reaches step 1 (microtask)");
+
+ let ended = false;
+ aud.onended = () => ended = true;
+ await new Promise(r => queueTask(r));
+
+ assert_true(aud.ended, "HTMLAudioElement becomes ended asynchronously when its MediaStream provider becomes inaudible");
+ assert_true(ended, "HTMLAudioElement fires the ended event asynchronously when its MediaStream provider becomes inaudible");
+}, "Tests that an audio element with an assigned MediaStream ends when the MediaStream becomes inaudible through track removal");
+</script>
+</body>
+</html>
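The tests above resolve a promise via `queueTask(r)` to yield to a new task before checking `ended` and the ended event; that helper is defined outside this hunk. A minimal sketch of such a helper, assuming it only needs to schedule its callback in a fresh task (an illustration, not the tests' actual definition):

    // Schedule `callback` in a new task, after any tasks the media element has
    // already queued (such as firing "ended").
    function queueTask(callback) {
      const channel = new MessageChannel();
      channel.port1.onmessage = () => callback();
      channel.port2.postMessage(undefined);
    }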
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-add-audio-track.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-add-audio-track.https.html
new file mode 100644
index 0000000000..880941b3ba
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-add-audio-track.https.html
@@ -0,0 +1,42 @@
+<!doctype html>
+<html>
+<head>
+<title>Adding a track to a MediaStream</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStreamTrackList-add-void-MediaStreamTrack-track">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#event-mediastream-addtrack">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your audio stream, then your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that adding a track to a MediaStream works as expected.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async t => {
+ await setMediaPermission();
+ const audio = await navigator.mediaDevices.getUserMedia({audio: true});
+ const video = await navigator.mediaDevices.getUserMedia({video: true});
+ assert_equals(video.getAudioTracks().length, 0, "video mediastream starts with no audio track");
+ video.addTrack(audio.getAudioTracks()[0]);
+ assert_equals(video.getAudioTracks().length, 1, "video mediastream has now one audio track");
+ video.addTrack(audio.getAudioTracks()[0]);
+ // If track is already in stream's track set, then abort these steps.
+ assert_equals(video.getAudioTracks().length, 1, "video mediastream still has one audio track");
+
+ audio.onaddtrack = t.step_func(function () {
+ assert_unreached("onaddtrack is not fired when the script directly modified the track of a mediastream");
+ });
+
+ assert_equals(audio.getVideoTracks().length, 0, "audio mediastream starts with no video track");
+ audio.addTrack(video.getVideoTracks()[0]);
+ assert_equals(audio.getVideoTracks().length, 1, "audio mediastream now has one video track");
+}, "Tests that adding a track to a MediaStream works as expected");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-audio-only.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-audio-only.https.html
new file mode 100644
index 0000000000..033a7cc76e
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-audio-only.https.html
@@ -0,0 +1,32 @@
+<!doctype html>
+<html>
+<head>
+<title>getUserMedia({audio:true}) creates a stream with at least an audio track</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-NavigatorUserMedia-getUserMedia-void-MediaStreamConstraints-constraints-NavigatorUserMediaSuccessCallback-successCallback-NavigatorUserMediaErrorCallback-errorCallback">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStreamTrack-kind">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your audio stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that the MediaStream object returned by
+the success callback in getUserMedia has exactly one audio track.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({audio:true});
+ assert_true(stream instanceof MediaStream, "getUserMedia success callback comes with a MediaStream object");
+ assert_equals(stream.getAudioTracks().length, 1, "the media stream has exactly one audio track");
+ assert_equals(stream.getAudioTracks()[0].kind, "audio", "getAudioTracks() returns a sequence of tracks whose kind is 'audio'");
+  assert_equals(stream.getVideoTracks().length, 0, "the media stream has zero video tracks");
+}, "Tests that a MediaStream with exactly one audio track is returned");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-clone.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-clone.https.html
new file mode 100644
index 0000000000..0fe6f3498b
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-clone.https.html
@@ -0,0 +1,98 @@
+<!doctype html>
+<html>
+<head>
+<title>MediaStream and MediaStreamTrack clone()</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#dom-mediastream-clone">
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#dom-mediastreamtrack-clone">
+</head>
+<body>
+<p class="instructions">When prompted, accept to give permission to use your audio and video devices.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that cloning MediaStreams and MediaStreamTracks works as expected.</p>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+
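+// Clones get new track ids, and their lifetime is independent: stopping the
+// original tracks must not end a previously made clone.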
+promise_test(async t => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({video: true, audio: true});
+ assert_equals(stream.getAudioTracks().length, 1);
+ assert_equals(stream.getVideoTracks().length, 1);
+
+ const clone1 = stream.clone();
+ assert_equals(clone1.getAudioTracks().length, 1);
+ assert_equals(clone1.getVideoTracks().length, 1);
+ assert_not_equals(stream.getAudioTracks()[0].id, clone1.getAudioTracks()[0].id);
+ assert_not_equals(stream.getVideoTracks()[0].id, clone1.getVideoTracks()[0].id);
+
+ stream.getTracks().forEach(track => track.stop());
+ assert_false(stream.active);
+ assert_equals(stream.getAudioTracks()[0].readyState, "ended");
+ assert_equals(stream.getVideoTracks()[0].readyState, "ended");
+ assert_true(clone1.active);
+ assert_equals(clone1.getAudioTracks()[0].readyState, "live");
+ assert_equals(clone1.getVideoTracks()[0].readyState, "live");
+
+ clone1.getAudioTracks()[0].stop();
+ assert_true(clone1.active);
+ assert_equals(clone1.getAudioTracks()[0].readyState, "ended");
+ assert_equals(clone1.getVideoTracks()[0].readyState, "live");
+
+ const clone2 = clone1.clone();
+ assert_true(clone2.active);
+ assert_equals(clone2.getAudioTracks()[0].readyState, "ended");
+ assert_equals(clone2.getVideoTracks()[0].readyState, "live");
+
+ clone1.getVideoTracks()[0].stop();
+ clone2.getVideoTracks()[0].stop();
+
+ const clone3 = clone2.clone();
+ assert_false(clone3.active);
+ assert_equals(clone3.getAudioTracks()[0].readyState, "ended");
+ assert_equals(clone3.getVideoTracks()[0].readyState, "ended");
+ assert_not_equals(clone1.getAudioTracks()[0].id, clone2.getAudioTracks()[0].id);
+ assert_not_equals(clone1.getVideoTracks()[0].id, clone2.getVideoTracks()[0].id);
+ assert_not_equals(clone2.getAudioTracks()[0].id, clone3.getAudioTracks()[0].id);
+ assert_not_equals(clone2.getVideoTracks()[0].id, clone3.getVideoTracks()[0].id);
+ assert_not_equals(clone1.getAudioTracks()[0].id, clone3.getAudioTracks()[0].id);
+ assert_not_equals(clone1.getVideoTracks()[0].id, clone3.getVideoTracks()[0].id);
+}, "Tests that cloning MediaStream objects works as expected");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true, audio: true});
+ assert_equals(stream.getAudioTracks().length, 1);
+ assert_equals(stream.getVideoTracks().length, 1);
+ assert_equals(stream.getAudioTracks()[0].readyState, "live");
+ assert_equals(stream.getVideoTracks()[0].readyState, "live");
+ assert_true(stream.active);
+
+ const audio_clone = stream.getAudioTracks()[0].clone();
+ const video_clone = stream.getVideoTracks()[0].clone();
+ assert_equals(audio_clone.readyState, "live");
+ assert_equals(video_clone.readyState, "live");
+ assert_not_equals(stream.getAudioTracks()[0].id, audio_clone.id);
+ assert_not_equals(stream.getVideoTracks()[0].id, video_clone.id);
+
+ stream.getTracks().forEach(track => track.stop());
+ assert_false(stream.active);
+ assert_equals(stream.getAudioTracks()[0].readyState, "ended");
+ assert_equals(stream.getVideoTracks()[0].readyState, "ended");
+ assert_equals(audio_clone.readyState, "live");
+ assert_equals(video_clone.readyState, "live");
+
+ stream.addTrack(audio_clone);
+ stream.addTrack(video_clone);
+ assert_true(stream.active);
+
+ stream.getTracks().forEach(track => track.stop());
+ assert_false(stream.active);
+ assert_equals(audio_clone.readyState, "ended");
+ assert_equals(video_clone.readyState, "ended");
+}, "Tests that cloning MediaStreamTrack objects works as expected");
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-default-feature-policy.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-default-feature-policy.https.html
new file mode 100644
index 0000000000..b81404bf33
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-default-feature-policy.https.html
@@ -0,0 +1,84 @@
+<!DOCTYPE html>
+<body>
+<script src=/resources/testharness.js></script>
+ <script src=/resources/testharnessreport.js></script>
+ <script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+ <script src=/common/get-host-info.sub.js></script>
+ <script src=/feature-policy/resources/featurepolicy.js></script>
+ <script>
+ 'use strict';
+ async function gUM({audio, video}) {
+ let stream;
+ if (!page_loaded_in_iframe()) {
+ await setMediaPermission();
+ }
+ try {
+ stream = await navigator.mediaDevices.getUserMedia({audio, video});
+ // getUserMedia must guarantee the number of tracks requested or fail.
+ if ((audio && stream.getAudioTracks().length == 0) ||
+ (video && stream.getVideoTracks().length == 0)) {
+ throw {name: `All requested devices must be present with ` +
+ `audio ${audio} and video ${video}, or fail`};
+ }
+ } finally {
+ if (stream) {
+ stream.getTracks().forEach(track => track.stop());
+ }
+ }
+ }
+
+ async function must_disallow_gUM({audio, video}) {
+ try {
+ await gUM({audio, video});
+ } catch (e) {
+ if (e.name == 'NotAllowedError') {
+ return;
+ }
+ throw e;
+ }
+ throw {name: `audio ${audio} and video ${video} constraints must not be ` +
+ `allowed.`};
+ }
+
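+  // Run the shared feature-policy tests for microphone only, camera only, and
+  // both together; in the cross-origin frame, a device that was not delegated
+  // must be rejected with NotAllowedError.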
+ const cross_domain = get_host_info().HTTPS_REMOTE_ORIGIN;
+ run_all_fp_tests_allow_self(
+ cross_domain,
+ 'microphone',
+ 'NotAllowedError',
+ async () => {
+ await gUM({audio: true});
+ if (window.location.href.includes(cross_domain)) {
+ await must_disallow_gUM({video: true});
+ await must_disallow_gUM({audio: true, video: true});
+ }
+ }
+ );
+
+ run_all_fp_tests_allow_self(
+ cross_domain,
+ 'camera',
+ 'NotAllowedError',
+ async () => {
+ await gUM({video: true});
+ if (window.location.href.includes(cross_domain)) {
+ await must_disallow_gUM({audio: true});
+ await must_disallow_gUM({audio: true, video: true});
+ }
+ }
+ );
+
+ run_all_fp_tests_allow_self(
+ cross_domain,
+ 'camera;microphone',
+ 'NotAllowedError',
+ async () => {
+ await gUM({audio: true, video: true});
+ await gUM({audio: true});
+ await gUM({video: true});
+ }
+ );
+ </script>
+</body>
+
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-finished-add.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-finished-add.https.html
new file mode 100644
index 0000000000..797db0444e
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-finished-add.https.html
@@ -0,0 +1,35 @@
+<!doctype html>
+<html>
+<head>
+<title>Adding a track to an inactive MediaStream</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#widl-MediaStream-addTrack-void-MediaStreamTrack-track">
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#widl-MediaStreamTrack-stop-void">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your audio stream, then
+your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that adding a track to an inactive
+MediaStream is allowed.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission();
+ const audio = await navigator.mediaDevices.getUserMedia({audio:true});
+ const video = await navigator.mediaDevices.getUserMedia({video:true});
+ audio.getAudioTracks()[0].stop();
+ assert_false(audio.active, "audio stream is inactive after stopping its only audio track");
+ assert_true(video.active, "video stream is active");
+ audio.addTrack(video.getVideoTracks()[0]);
+ audio.removeTrack(audio.getAudioTracks()[0]);
+}, "Tests that adding a track to an inactive MediaStream is allowed");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-gettrackid.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-gettrackid.https.html
new file mode 100644
index 0000000000..9a4aef9782
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-gettrackid.https.html
@@ -0,0 +1,29 @@
+<!doctype html>
+<html>
+<head>
+<title>Retrieving a track from a MediaStream</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStream-getTrackById-MediaStreamTrack-DOMString-trackId">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that MediaStream.getTrackById behaves as expected</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ var track = stream.getVideoTracks()[0];
+ assert_equals(track, stream.getTrackById(track.id), "getTrackById returns track of given id");
+  assert_equals(stream.getTrackById(track.id + "foo"), null, "getTrackById of nonexistent id returns null");
+}, "Tests that MediaStream.getTrackById works as expected");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-id.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-id.https.html
new file mode 100644
index 0000000000..3c4fe0529b
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-id.https.html
@@ -0,0 +1,30 @@
+<!doctype html>
+<html>
+<head>
+<title>getUserMedia() creates a stream with a proper id</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStream-id">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that the MediaStream object returned by
+the success callback in getUserMedia has a correct id.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
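+// The set of characters the spec allows in a MediaStream id (a 36-character string).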
+const allowedCharacters = /^[\u0021\u0023-\u0027\u002A-\u002B\u002D-\u002E\u0030-\u0039\u0041-\u005A\u005E-\u007E]*$/;
+promise_test(async () => {
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getUserMedia({video:true});
+ assert_equals(stream.id.length, 36, "the media stream id has 36 characters");
+ assert_regexp_match(stream.id, allowedCharacters, "the media stream id uses the set of allowed characters");
+}, "Tests that a MediaStream with a correct id is returned");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-idl.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-idl.https.html
new file mode 100644
index 0000000000..49c6bc07ca
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-idl.https.html
@@ -0,0 +1,77 @@
+<!doctype html>
+<html>
+<head>
+<title>MediaStream constructor algorithm</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#idl-def-MediaStream">
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#widl-MediaStream-id">
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#mediastream">
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#event-mediastreamtrack-ended">
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#widl-MediaStreamTrack-stop-void">
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#widl-MediaStreamTrack-clone-MediaStreamTrack">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video and audio stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that the MediaStream constructor
+follows the algorithm set in the spec.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
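+ // The MediaStream() constructor mints a fresh id, shares (rather than clones) the
+ // given tracks, accepts already-ended tracks, and yields an inactive stream when
+ // every supplied track is ended.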
+ promise_test(async () => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({video: true, audio:true})
+ let stream1 = new MediaStream();
+ assert_not_equals(stream.id, stream1.id, "Two different MediaStreams have different ids");
+ let stream2 = new MediaStream(stream);
+ assert_not_equals(stream.id, stream2.id, "A MediaStream constructed from another has a different id");
+ let audioTrack1 = stream.getAudioTracks()[0];
+ let videoTrack = stream.getVideoTracks()[0];
+ assert_equals(audioTrack1, stream2.getAudioTracks()[0], "A MediaStream constructed from another shares the same audio track");
+ assert_equals(videoTrack, stream2.getVideoTracks()[0], "A MediaStream constructed from another shares the same video track");
+ let stream4 = new MediaStream([audioTrack1]);
+ assert_equals(stream4.getTrackById(audioTrack1.id), audioTrack1, "a non-ended track gets added via the MediaStream constructor");
+
+ let audioTrack2 = audioTrack1.clone();
+ audioTrack2.addEventListener("ended", () => {
+ throw new Error("ended event should not be fired by MediaStreamTrack.stop().")
+ });
+ audioTrack2.stop();
+ assert_equals(audioTrack2.readyState, "ended", "a stopped track is marked ended synchronously");
+
+ let stream3 = new MediaStream([audioTrack2, videoTrack]);
+ assert_equals(stream3.getTrackById(audioTrack2.id), audioTrack2, "an ended track gets added via the MediaStream constructor");
+ assert_equals(stream3.getTrackById(videoTrack.id), videoTrack, "a non-ended track gets added via the MediaStream constructor even if the previous track was ended");
+
+ let stream5 = new MediaStream([audioTrack2]);
+ assert_equals(stream5.getTrackById(audioTrack2.id), audioTrack2, "an ended track gets added via the MediaStream constructor");
+  assert_false(stream5.active, "a MediaStream constructed from a list of MediaStreamTrack objects that are all ended must have its active attribute set to false");
+
+ audioTrack1.stop();
+ assert_equals(audioTrack1.readyState, "ended",
+ "Stopping audioTrack1 marks it ended synchronously");
+
+ videoTrack.stop();
+ assert_equals(videoTrack.readyState, "ended",
+ "Stopping videoTrack marks it ended synchronously");
+
+ assert_false(stream.active,
+ "The original MediaStream is marked inactive synchronously");
+ assert_false(stream1.active,
+ "MediaStream 1 is marked inactive synchronously");
+ assert_false(stream2.active,
+ "MediaStream 2 is marked inactive synchronously");
+ assert_false(stream3.active,
+ "MediaStream 3 is marked inactive synchronously");
+ assert_false(stream4.active,
+ "MediaStream 4 is marked inactive synchronously");
+
+}, "Tests that a MediaStream constructor follows the algorithm set in the spec");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-removetrack.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-removetrack.https.html
new file mode 100644
index 0000000000..6c9b9b02e3
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-removetrack.https.html
@@ -0,0 +1,140 @@
+<!doctype html>
+<html>
+<head>
+<title>Removing a track from a MediaStream</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStreamTrackList-remove-void-MediaStreamTrack-track">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#event-mediastream-removetrack">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your audio stream, then your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that removinging a track from a MediaStream works as expected.</p>
+<video id="video" height="120" width="160" autoplay muted></video>
+<audio id="audio" autoplay muted></audio>
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+
+promise_test(async t => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({video: true, audio: true});
+ const tracks = stream.getTracks();
+ t.add_cleanup(() => tracks.forEach(track => track.stop()));
+ const stream2 = await navigator.mediaDevices.getUserMedia({audio: true});
+ tracks.push(...stream2.getTracks());
+
+ stream.onremovetrack = stream2.onremovetrack = t.step_func(() =>
+ assert_unreached("onremovetrack is not triggered by script itself"));
+
+ assert_equals(stream.getTracks().length, 2, "mediastream starts with 2 tracks");
+ stream.removeTrack(stream.getVideoTracks()[0]);
+ assert_equals(stream.getTracks().length, 1, "mediastream has 1 track left");
+ stream.removeTrack(stream.getAudioTracks()[0]);
+ assert_equals(stream.getTracks().length, 0, "mediastream has no tracks left");
+ stream.removeTrack(stream2.getTracks()[0]); // should not throw
+
+ // Allow time to verify no events fire.
+ await new Promise(r => t.step_timeout(r, 1));
+
+}, "Tests that a removal from a MediaStream works as expected");
+
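+// Resolves to true if `name` fires on `target` within `ms` milliseconds, and to
+// false if the timeout wins the race instead.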
+async function doesEventFire(t, target, name, ms = 1) {
+ const cookie = {};
+ const value = await Promise.race([
+ new Promise(r => target.addEventListener(name, r, {once: true})),
+ new Promise(r => t.step_timeout(r, ms)).then(() => cookie)
+ ]);
+ return value !== cookie;
+}
+
+const doEventsFire = (t, target1, target2, name, ms = 1) => Promise.all([
+  doesEventFire(t, target1, name, ms),
+  doesEventFire(t, target2, name, ms)
+]);
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true, audio: true});
+ const tracks = stream.getTracks();
+
+ audio.srcObject = video.srcObject = stream;
+
+ t.add_cleanup(() => {
+ for (const track of tracks) {
+ track.stop();
+ }
+ audio.srcObject = video.srcObject = null;
+ });
+
+ await Promise.all([
+ new Promise(r => audio.onloadedmetadata = r),
+ new Promise(r => video.onloadedmetadata = r)
+ ]);
+
+ assert_equals(audio.ended, false, "audio element starts out not ended");
+ assert_equals(video.ended, false, "video element starts out not ended");
+
+ stream.removeTrack(stream.getVideoTracks()[0]);
+ {
+ const [audioDidEnd, videoDidEnd] = await doEventsFire(t, audio, video, "ended");
+ assert_equals(audio.ended, false, "audio element unaffected");
+ assert_equals(audioDidEnd, false, "no audio ended event should fire yet");
+ assert_equals(video.ended, false, "video element keeps going with audio track");
+ assert_equals(videoDidEnd, false, "no video ended event should fire yet");
+ }
+ stream.removeTrack(stream.getAudioTracks()[0]);
+ {
+ const [audioDidEnd, videoDidEnd] = await doEventsFire(t, audio, video, "ended");
+ assert_equals(audio.ended, true, "audio element ended because no more audio tracks");
+ assert_equals(audioDidEnd, true, "go audio ended event");
+ assert_equals(video.ended, true, "video element ended because no more tracks");
+ assert_equals(videoDidEnd, true, "got video ended event");
+ }
+}, "Test that removal from a MediaStream fires ended on media elements (video first)");
+
+promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true, audio: true});
+ const tracks = stream.getTracks();
+
+ audio.srcObject = video.srcObject = stream;
+
+ t.add_cleanup(() => {
+ for (const track of tracks) {
+ track.stop();
+ }
+ audio.srcObject = video.srcObject = null;
+ });
+
+ await Promise.all([
+ new Promise(r => audio.onloadedmetadata = r),
+ new Promise(r => video.onloadedmetadata = r)
+ ]);
+
+ assert_equals(audio.ended, false, "audio element starts out not ended");
+ assert_equals(video.ended, false, "video element starts out not ended");
+
+ stream.removeTrack(stream.getAudioTracks()[0]);
+ {
+ const [audioDidEnd, videoDidEnd] = await doEventsFire(t, audio, video, "ended");
+ assert_equals(audio.ended, true, "audio element ended because no more audio tracks");
+ assert_equals(audioDidEnd, true, "got audio ended event");
+ assert_equals(video.ended, false, "video element keeps going with video track");
+ assert_equals(videoDidEnd, false, "no video ended event should fire yet");
+ }
+ stream.removeTrack(stream.getVideoTracks()[0]);
+ {
+ const [audioDidEnd, videoDidEnd] = await doEventsFire(t, audio, video, "ended");
+ assert_equals(audio.ended, true, "audio element remains ended from before");
+ assert_equals(audioDidEnd, false, "no second audio ended event should fire");
+ assert_equals(video.ended, true, "video element ended because no more tracks");
+ assert_equals(videoDidEnd, true, "got video ended event");
+ }
+}, "Test that removal from a MediaStream fires ended on media elements (audio first)");
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-supported-by-feature-policy.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-supported-by-feature-policy.html
new file mode 100644
index 0000000000..63f565926a
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-supported-by-feature-policy.html
@@ -0,0 +1,15 @@
+<!DOCTYPE html>
+<title>Test that camera and microphone are advertised in the feature list</title>
+<link rel="help" href="https://w3c.github.io/webappsec-feature-policy/#dom-featurepolicy-features">
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#feature-policy-integration">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+test(() => {
+ assert_in_array('camera', document.featurePolicy.features());
+}, 'document.featurePolicy.features should advertise camera.');
+
+test(() => {
+ assert_in_array('microphone', document.featurePolicy.features());
+}, 'document.featurePolicy.features should advertise microphone.');
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStream-video-only.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStream-video-only.https.html
new file mode 100644
index 0000000000..6e0a6cc9cf
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStream-video-only.https.html
@@ -0,0 +1,32 @@
+<!doctype html>
+<html>
+<head>
+<title>getUserMedia({video:true}) creates a stream with one video track</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-NavigatorUserMedia-getUserMedia-void-MediaStreamConstraints-constraints-NavigatorUserMediaSuccessCallback-successCallback-NavigatorUserMediaErrorCallback-errorCallback">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStreamTrack-kind">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that the MediaStream object returned by
+the success callback in getUserMedia has exactly one video track and no audio.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ assert_true(stream instanceof MediaStream, "getUserMedia success callback comes with a MediaStream object");
+  assert_equals(stream.getAudioTracks().length, 0, "the media stream has zero audio tracks");
+  assert_equals(stream.getVideoTracks().length, 1, "the media stream has exactly one video track");
+  assert_equals(stream.getVideoTracks()[0].kind, "video", "getVideoTracks() returns a sequence of tracks whose kind is 'video'");
+}, "Tests that a MediaStream with at least one video track is returned");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-MediaElement-disabled-audio-is-silence.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-MediaElement-disabled-audio-is-silence.https.html
new file mode 100644
index 0000000000..4ad2340ed5
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-MediaElement-disabled-audio-is-silence.https.html
@@ -0,0 +1,59 @@
+<!doctype html>
+<html>
+<head>
+<title>A disabled audio track is rendered as silence</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#introduction">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#mediastreams-as-media-elements">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your audio stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that a disabled audio track in a
+MediaStream is rendered as silence. It relies on the
+<a href="https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html">
+Web Audio API</a>.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+const aud = document.getElementById("aud");
+promise_test(async t => {
+ await setMediaPermission("granted", ["microphone"]);
+ const stream = await navigator.mediaDevices.getUserMedia({audio: true});
+ var ctx = new AudioContext();
+ var streamSource = ctx.createMediaStreamSource(stream);
+ var silenceDetector = ctx.createScriptProcessor(1024);
+ var count = 10;
+ let resolveAudioProcessPromise;
+  const audioProcessed = new Promise(res => resolveAudioProcessPromise = res);
+
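+  // Inspect 10 consecutive audio buffers; with the track disabled, every sample
+  // in both channels must be exactly zero.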
+ silenceDetector.onaudioprocess = function (e) {
+ var buffer1 = e.inputBuffer.getChannelData(0);
+ var buffer2 = e.inputBuffer.getChannelData(1);
+ var out = e.outputBuffer.getChannelData(0);
+ out = new Float32Array(buffer1);
+ for (var i = 0; i < buffer1.length; i++) {
+ assert_equals(buffer1[i], 0, "Audio buffer entry #" + i + " in channel 0 is silent");
+ }
+ for (var i = 0; i < buffer2.length; i++) {
+ assert_equals(buffer2[i], 0, "Audio buffer entry #" + i + " in channel 1 is silent");
+ }
+ count--;
+ if (count === 0) {
+ silenceDetector.onaudioprocess = null;
+ resolveAudioProcessPromise();
+ }
+ };
+ stream.getAudioTracks()[0].enabled = false;
+
+ streamSource.connect(silenceDetector);
+  silenceDetector.connect(ctx.destination);
+
+  // Wait until the 10 buffers have been inspected before letting the test finish.
+  await audioProcessed;
+}, "Tests that a disabled audio track in a MediaStream is rendered as silence");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-MediaElement-disabled-video-is-black.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-MediaElement-disabled-video-is-black.https.html
new file mode 100644
index 0000000000..3652362b84
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-MediaElement-disabled-video-is-black.https.html
@@ -0,0 +1,56 @@
+<!doctype html>
+<html>
+<head>
+<title>A disabled video track is rendered as blackness</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#introduction">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#mediastreams-as-media-elements">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that a disabled video track in a
+MediaStream is rendered as blackness.</p>
+<video id="vid"></video>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+const vid = document.getElementById("vid");
+const cv = document.createElement("canvas");
+promise_test(async () => {
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ if (stream.getVideoTracks()[0].enabled) {
+ stream.getVideoTracks()[0].enabled = false;
+ }
+
+ let resolveLoadedPromise;
+  const videoLoaded = new Promise(res => resolveLoadedPromise = res);
+ var testOnceLoadeddata = function() {
+ vid.removeEventListener("loadeddata", testOnceLoadeddata, false);
+ cv.width = vid.offsetWidth;
+ cv.height = vid.offsetHeight;
+ var ctx = cv.getContext("2d");
+ ctx.drawImage(vid,0,0);
+ var imageData = ctx.getImageData(0, 0, cv.width, cv.height);
+ for (var i = 0; i < imageData.data.length; i+=4) {
+ assert_equals(imageData.data[i], 0, "No red component in pixel #" + i);
+ assert_equals(imageData.data[i + 1], 0, "No green component in pixel #" + i);
+ assert_equals(imageData.data[i + 2], 0, "No blue component in pixel #" + i);
+ assert_equals(imageData.data[i + 3], 255, "No transparency in pixel #" + i);
+ }
+ resolveLoadedPromise();
+ };
+
+ vid.srcObject = stream;
+ vid.play();
+ vid.addEventListener("loadeddata", testOnceLoadeddata, false);
+}, "Tests that a disabled video track in a MediaStream is rendered as blackness");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-applyConstraints.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-applyConstraints.https.html
new file mode 100644
index 0000000000..fd451f86cf
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-applyConstraints.https.html
@@ -0,0 +1,83 @@
+<!doctype html>
+<title>MediaStreamTrack applyConstraints</title>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+ 'use strict'
+
+ // https://w3c.github.io/mediacapture-main/#dom-mediastreamtrack-applyconstraints
+
+ promise_test(async t => {
+ await setMediaPermission("granted", ["camera"]);
+ return navigator.mediaDevices.getUserMedia({ video: true })
+ .then(t.step_func(stream => {
+ return stream.getVideoTracks()[0].applyConstraints(
+ { groupId: { exact: "INVALID" } }).then(
+ t.unreached_func('Accepted invalid groupID'),
+ t.step_func(e => {
+ assert_equals(e.name, 'OverconstrainedError');
+ assert_equals(e.constraint, 'groupId');
+ }));
+ }));
+ }, 'applyConstraints rejects invalid groupID');
+
+ promise_test(t => {
+ return navigator.mediaDevices.getUserMedia({ video: true })
+ .then(t.step_func(stream => {
+ var track = stream.getVideoTracks()[0];
+ var groupId = track.getSettings().groupId;
+ return track.applyConstraints({ groupId: "INVALID" }).then(
+ t.step_func(() => {
+ assert_equals(track.getSettings().groupId, groupId);
+ }));
+ }));
+ }, 'applyConstraints accepts invalid ideal groupID, does not change setting');
+
+ promise_test(t => {
+ return navigator.mediaDevices.getUserMedia({ video: true })
+ .then(t.step_func(stream => {
+ var track = stream.getVideoTracks()[0];
+ var groupId = track.getSettings().groupId;
+ return navigator.mediaDevices.enumerateDevices().then(devices => {
+ var anotherDevice = devices.find(device => {
+ return device.kind == "videoinput" && device.groupId != groupId;
+ });
+ if (anotherDevice !== undefined) {
+ return track.applyConstraints(
+ { groupId: { exact: anotherDevice.groupId } }).then(
+ t.unreached_func(),
+ t.step_func(e => {
+ assert_equals(e.name, 'OverconstrainedError');
+ assert_equals(e.constraint, 'groupId');
+ }));
+ }
+ });
+ }));
+ }, 'applyConstraints rejects attempt to switch device using groupId');
+
+ promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({ video: true });
+ const [track] = stream.getVideoTracks();
+ t.add_cleanup(() => track.stop());
+ try {
+ await track.applyConstraints({ resizeMode: { exact: "INVALID" } });
+ t.unreached_func('applyConstraints() must fail with invalid resizeMode')();
+ } catch (e) {
+ assert_equals(e.name, 'OverconstrainedError');
+ assert_equals(e.constraint, 'resizeMode');
+ }
+ }, 'applyConstraints rejects invalid resizeMode');
+
+ promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({ video: true });
+ const [track] = stream.getVideoTracks();
+ t.add_cleanup(() => track.stop());
+ const resizeMode = track.getSettings().resizeMode;
+ await track.applyConstraints({ resizeMode: "INVALID" });
+ assert_equals(track.getSettings().resizeMode, resizeMode);
+ }, 'applyConstraints accepts invalid ideal resizeMode, does not change setting');
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-end-manual.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-end-manual.https.html
new file mode 100644
index 0000000000..58e484d484
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-end-manual.https.html
@@ -0,0 +1,54 @@
+<!doctype html>
+<html>
+<head>
+<title>Test that mediastreamtrack are properly ended</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://w3c.github.io/mediacapture-main/getusermedia.html#mediastreamtrack">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video and audio
+stream, and then revoke that permission.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that the video and audio tracks of
+MediaStream object returned by the success callback in getUserMedia are
+correctly set into inactive state when permission is revoked.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+'use strict';
+promise_test(async t => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({
+ audio: true,
+ video: true,
+ });
+
+ const vidTrack = stream.getVideoTracks()[0];
+ assert_equals(vidTrack.readyState, "live",
+ "The video track object is in live state");
+ const vidEnded = new Promise(r => vidTrack.onended = r);
+ const audTrack = stream.getAudioTracks()[0];
+ assert_equals(audTrack.readyState, "live",
+ "The audio track object is in live state");
+ const audEnded = new Promise(r => audTrack.onended = r);
+
+ await Promise.race([vidEnded, audEnded]);
+ assert_equals(stream.getTracks().filter(t => t.readyState == "ended").length,
+ 1, "Only one track is ended after first track's ended event");
+ assert_equals(stream.getTracks().filter(t => t.readyState == "live").length,
+ 1, "One track is still live after first track's ended event");
+ assert_true(stream.active, "MediaStream is still active");
+
+ await Promise.all([vidEnded, audEnded]);
+ assert_equals(vidTrack.readyState, "ended", "Video track ended as expected");
+ assert_equals(audTrack.readyState, "ended", "Audio track ended as expected");
+ assert_false(stream.active, "MediaStream has become inactive as expected");
+}, "Tests that MediaStreamTracks end properly on permission revocation");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-getCapabilities.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-getCapabilities.https.html
new file mode 100644
index 0000000000..b67a8d5156
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-getCapabilities.https.html
@@ -0,0 +1,153 @@
+<!doctype html>
+<title>MediaStreamTrack and InputDeviceInfo GetCapabilities</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+
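+// Each entry declares the shape its capability value must have: "enum-any" means
+// every reported value must come from validValues, "enum-all" means every value
+// in validValues must be reported.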
+const audioProperties = [
+ {name: "sampleRate", type: "number"},
+ {name: "sampleSize", type: "number"},
+ {name: "echoCancellation", type: "boolean"},
+ {name: "autoGainControl", type: "boolean"},
+ {name: "noiseSuppression", type: "boolean"},
+ {name: "latency", type: "number"},
+ {name: "channelCount", type: "number"},
+ {name: "deviceId", type: "string"},
+ {name: "groupId", type: "string"}
+];
+
+const videoProperties = [
+ {name: "width", type: "number"},
+ {name: "height", type: "number"},
+ {name: "aspectRatio", type: "number"},
+ {name: "frameRate", type: "number"},
+ {name: "facingMode", type: "enum-any", validValues: ["user", "environment", "left", "right"]},
+ {name: "resizeMode", type: "enum-all", validValues: ["none", "crop-and-scale"]},
+ {name: "deviceId", type: "string"},
+ {name: "groupId", type: "string"},
+];
+
+function verifyBooleanCapability(capability) {
+ assert_less_than_equal(capability.length, 2);
+ capability.forEach(c => assert_equals(typeof c, "boolean"));
+}
+
+function verifyNumberCapability(capability) {
+ assert_equals(typeof capability, "object");
+ assert_equals(Object.keys(capability).length, 2);
+ assert_true(capability.hasOwnProperty('min'));
+ assert_true(capability.hasOwnProperty('max'));
+ assert_less_than_equal(capability.min, capability.max);
+}
+
+// Verify that any value provided by an enum capability is in the set of valid
+// values.
+function verifyEnumAnyCapability(capability, enumMembers) {
+ capability.forEach(c => {
+ assert_equals(typeof c, "string");
+ assert_in_array(c, enumMembers);
+ });
+}
+
+// Verify that all required values are supported by a capability.
+function verifyEnumAllCapability(capability, enumMembers, testNamePrefix) {
+ enumMembers.forEach(member => {
+ test(() => {
+ assert_in_array(member, capability);
+ }, testNamePrefix + " Value: " + member);
+ });
+}
+
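+// Asserts that `capabilities` exposes `property.name` and that its value matches
+// the declared type, registering a sub-test for each check.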
+function testCapabilities(capabilities, property, testNamePrefix) {
+ let testName = testNamePrefix + " " + property.name;
+ test(() => {
+ assert_true(capabilities.hasOwnProperty(property.name));
+ }, testName + " property present.");
+
+ const capability = capabilities[property.name];
+ testName += " properly supported.";
+ if (property.type == "string") {
+ test(() => {
+ assert_equals(typeof capability, "string");
+ }, testName);
+ }
+
+ if (property.type == "boolean") {
+ test(() => {
+ verifyBooleanCapability(capability);
+ }, testName);
+ }
+
+ if (property.type == "number") {
+ test(() => {
+ verifyNumberCapability(capability);
+ }, testName);
+ }
+
+ if (property.type.startsWith("enum")) {
+ test(() => {
+ verifyEnumAnyCapability(capability, property.validValues);
+ }, testName);
+
+ if (property.type == "enum-all") {
+ verifyEnumAllCapability(capability, property.validValues, testName);
+ }
+ }
+}
+
+{
+ audioProperties.forEach((property, i) => {
+ promise_test(async t => {
+ if (i === 0) await setMediaPermission("granted", ["microphone"]);
+ const stream = await navigator.mediaDevices.getUserMedia({audio: true});
+ t.add_cleanup(() => stream.getAudioTracks()[0].stop());
+ const audioCapabilities = stream.getAudioTracks()[0].getCapabilities();
+ testCapabilities(audioCapabilities, property, "Audio track getCapabilities()");
+ }, "Setup audio MediaStreamTrack getCapabilities() test for " + property.name);
+ });
+
+ videoProperties.forEach(property => {
+ promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ t.add_cleanup(() => stream.getVideoTracks()[0].stop());
+      const videoCapabilities = stream.getVideoTracks()[0].getCapabilities();
+      testCapabilities(videoCapabilities, property, "Video track getCapabilities()");
+ }, "Setup video MediaStreamTrack getCapabilities() test for " + property.name);
+ });
+}
+
+{
+ audioProperties.forEach(property => {
+ promise_test(async t => {
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ for (const device of devices) {
+ // Test only one device.
+ if (device.kind == "audioinput") {
+ assert_inherits(device, "getCapabilities");
+ const capabilities = device.getCapabilities();
+ testCapabilities(capabilities, property, "Audio device getCapabilities()");
+ break;
+ }
+ }
+ }, "Setup audio InputDeviceInfo getCapabilities() test for " + property.name);
+ });
+
+ videoProperties.forEach(property => {
+ promise_test(async t => {
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ for (const device of devices) {
+ // Test only one device.
+ if (device.kind == "videoinput") {
+ assert_inherits(device, "getCapabilities");
+ const capabilities = device.getCapabilities();
+ testCapabilities(capabilities, property, "Video device getCapabilities()");
+ break;
+ }
+ }
+ }, "Setup video InputDeviceInfo getCapabilities() test for " + property.name);
+ });
+}
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-getSettings.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-getSettings.https.html
new file mode 100644
index 0000000000..c1d90a4f48
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-getSettings.https.html
@@ -0,0 +1,222 @@
+<!doctype html>
+<title>MediaStreamTrack GetSettings</title>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<meta name=timeout content=long>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+ 'use strict'
+
+ // https://w3c.github.io/mediacapture-main/archives/20170605/getusermedia.html
+
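+  // Opens a single getUserMedia track of the given kind, stops it at cleanup,
+  // and returns its settings.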
+ async function createTrackAndGetSettings(t, kind) {
+ const constraints = {};
+ constraints[kind] = true;
+ const stream = await navigator.mediaDevices.getUserMedia(constraints);
+ assert_equals(stream.getTracks().length, 1);
+ t.add_cleanup(() => stream.getTracks()[0].stop());
+ return stream.getTracks()[0].getSettings();
+ }
+
+ promise_test(async t => {
+ await setMediaPermission("granted", ["camera"]);
+ const mediaStream1 = await navigator.mediaDevices.getUserMedia({
+ video: true,
+ audio: false,
+ });
+ t.add_cleanup(() => mediaStream1.getVideoTracks()[0].stop());
+ const settings1 = mediaStream1.getVideoTracks()[0].getSettings();
+
+ const mediaStream2 = await navigator.mediaDevices.getUserMedia({
+ video: {
+ deviceId: {exact: settings1.deviceId},
+ },
+ audio: false
+ });
+ t.add_cleanup(() => mediaStream2.getVideoTracks()[0].stop());
+ const settings2 = mediaStream2.getVideoTracks()[0].getSettings();
+
+ assert_equals(settings1.deviceId, settings2.deviceId);
+ }, 'A device can be opened twice and have the same device ID');
+
+ promise_test(async t => {
+ const mediaStream1 = await navigator.mediaDevices.getUserMedia({
+ video: true,
+ audio: false,
+ });
+ t.add_cleanup(() => mediaStream1.getVideoTracks()[0].stop());
+ const settings1 = mediaStream1.getVideoTracks()[0].getSettings();
+
+ const mediaStream2 = await navigator.mediaDevices.getUserMedia({
+ video: {
+ deviceId: {exact: settings1.deviceId},
+ width: {
+ ideal: settings1.width / 2,
+ },
+ },
+ audio: false
+ });
+ t.add_cleanup(() => mediaStream2.getVideoTracks()[0].stop());
+ const settings2 = mediaStream2.getVideoTracks()[0].getSettings();
+
+ assert_equals(settings1.deviceId, settings2.deviceId);
+ assert_between_inclusive(settings2.width, settings1.width / 2, settings1.width);
+ }, 'A device can be opened twice with different resolutions requested');
+
+ promise_test(async t => {
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ const inputDevices = devices.filter(d => d.kind != "audiooutput");
+ assert_greater_than(inputDevices.length, 0);
+ for (const device of inputDevices) {
+ const device_id_constraint = {deviceId: {exact: device.deviceId}};
+ const constraints = device.kind == "audioinput"
+ ? {audio: device_id_constraint}
+ : {video: device_id_constraint};
+
+ const stream = await navigator.mediaDevices.getUserMedia(constraints);
+ assert_true(stream.getTracks()[0].getSettings().groupId === device.groupId, "device groupId");
+ assert_greater_than(device.groupId.length, 0);
+ }
+ }, 'groupId is correctly reported by getSettings() for all input devices');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.deviceId), "string",
+ "deviceId should exist and it should be a string.");
+ }, 'deviceId is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.groupId), "string",
+ "groupId should exist and it should be a string.");
+ }, 'groupId is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.sampleRate), "number",
+ "sampleRate should exist and it should be a number.");
+ assert_greater_than(settings.sampleRate, 0);
+ }, 'sampleRate is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.sampleSize), "number",
+ "sampleSize should exist and it should be a number.");
+ assert_greater_than(settings.sampleSize, 0);
+ }, 'sampleSize is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.echoCancellation), "boolean",
+ "echoCancellation should exist and it should be a boolean.");
+ }, 'echoCancellation is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.autoGainControl), "boolean",
+ "autoGainControl should exist and it should be a boolean.");
+ }, 'autoGainControl is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.noiseSuppression), "boolean",
+ "noiseSuppression should exist and it should be a boolean.");
+ }, 'noiseSuppression is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.latency), "number",
+ "latency should exist and it should be a number.");
+    assert_greater_than_equal(settings.latency, 0);
+ }, 'latency is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "audio");
+ assert_equals(typeof(settings.channelCount), "number",
+ "channelCount should exist and it should be a number.");
+ assert_greater_than(settings.channelCount, 0);
+ }, 'channelCount is reported by getSettings() for getUserMedia() audio tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "video");
+ assert_equals(typeof(settings.deviceId), "string",
+ "deviceId should exist and it should be a string.");
+ }, 'deviceId is reported by getSettings() for getUserMedia() video tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "video");
+ assert_equals(typeof(settings.groupId), "string",
+ "groupId should exist and it should be a string.");
+ }, 'groupId is reported by getSettings() for getUserMedia() video tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "video");
+ assert_equals(typeof(settings.width), "number",
+ "width should exist and it should be a number.");
+ assert_true(Number.isInteger(settings.width), "width should be an integer.");
+  assert_greater_than_equal(settings.width, 0);
+ }, 'width is reported by getSettings() for getUserMedia() video tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "video");
+ assert_equals(typeof(settings.height), "number",
+ "height should exist and it should be a number.");
+ assert_true(Number.isInteger(settings.height), "height should be an integer.");
+ assert_greater_than_equal(settings.height, 0);
+ }, 'height is reported by getSettings() for getUserMedia() video tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "video");
+ assert_equals(typeof(settings.aspectRatio), "number",
+ "aspectRatio should exist and it should be a number.");
+ assert_greater_than_equal(settings.aspectRatio, 0);
+ }, 'aspectRatio is reported by getSettings() for getUserMedia() video tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "video");
+ assert_equals(typeof(settings.frameRate), "number",
+ "frameRate should exist and it should be a number.");
+ assert_greater_than_equal(settings.frameRate, 0);
+ }, 'frameRate is reported by getSettings() for getUserMedia() video tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "video");
+ // facingMode not treated as mandatory because not all platforms provide
+ // this information.
+ if (settings.facingMode) {
+ assert_equals(typeof(settings.facingMode), "string",
+ "If facingMode is provided it should be a string.");
+ assert_in_array(settings.facingMode,
+ ['user', 'environment', 'left', 'right']);
+ }
+ }, 'facingMode is reported by getSettings() for getUserMedia() video tracks');
+
+ promise_test(async t => {
+ const settings = await createTrackAndGetSettings(t, "video");
+ assert_equals(typeof(settings.resizeMode), "string",
+ "resizeMode should exist and it should be a string.");
+ assert_in_array(settings.resizeMode, ['none', 'crop-and-scale']);
+ }, 'resizeMode is reported by getSettings() for getUserMedia() video tracks');
+
+ promise_test(async t => {
+ const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video : true});
+ const audioTrack = stream.getAudioTracks()[0];
+ const videoTrack = stream.getVideoTracks()[0];
+
+ const audioDeviceId = audioTrack.getSettings().deviceId;
+ const videoDeviceId = videoTrack.getSettings().deviceId;
+ const audioGroupId = audioTrack.getSettings().groupId;
+ const videoGroupId = videoTrack.getSettings().groupId;
+
+ audioTrack.stop();
+ videoTrack.stop();
+
+ assert_equals(audioTrack.getSettings().deviceId, audioDeviceId, "audio track deviceId");
+ assert_equals(videoTrack.getSettings().deviceId, videoDeviceId, "video track deviceId");
+ assert_equals(audioTrack.getSettings().groupId, audioGroupId, "audio track groupId");
+ assert_equals(videoTrack.getSettings().groupId, videoGroupId, "video track groupId");
+ }, 'Stopped tracks should expose deviceId/groupId');
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-id.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-id.https.html
new file mode 100644
index 0000000000..a9b4b99f87
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-id.https.html
@@ -0,0 +1,27 @@
+<!doctype html>
+<html>
+<head>
+<title>Distinct id for distinct mediastream tracks</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStreamTrack-id">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your audio and video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that distinct mediastream tracks have distinct ids.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission();
+ const stream = await navigator.mediaDevices.getUserMedia({video: true, audio: true})
+  assert_not_equals(stream.getVideoTracks()[0].id, stream.getAudioTracks()[0].id, "audio and video tracks have distinct ids");
+}, "Tests that distinct mediastream tracks have distinct ids");
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-iframe-audio-transfer.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-iframe-audio-transfer.https.html
new file mode 100644
index 0000000000..e1df23a24a
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-iframe-audio-transfer.https.html
@@ -0,0 +1,30 @@
+<!doctype html>
+<title>MediaStreamTrack transfer to iframe</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+promise_test(async () => {
+ const iframe = document.createElement("iframe");
+ const stream = await navigator.mediaDevices.getDisplayMedia({audio:true, video: true});
+ const track = stream.getAudioTracks()[0];
+ const cloned_track = track.clone();
+ const result = new Promise((resolve, reject) => {
+ window.onmessage = (e) => {
+ if (e.data.result === 'Failure') {
+ reject('Failed: ' + e.data.error);
+ } else {
+ resolve();
+ }
+ };
+ });
+ iframe.addEventListener("load", () => {
+ assert_not_equals(track.readyState, "ended");
+    iframe.contentWindow.postMessage(track, '*', [track]);
+ assert_equals(track.readyState, "ended");
+ assert_equals(cloned_track.readyState, "live");
+ });
+ iframe.src = "support/iframe-MediaStreamTrack-transfer.html";
+ document.body.appendChild(iframe);
+ return result;
+});
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-iframe-transfer.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-iframe-transfer.https.html
new file mode 100644
index 0000000000..54fc2b2d16
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-iframe-transfer.https.html
@@ -0,0 +1,30 @@
+<!doctype html>
+<title>MediaStreamTrack transfer to iframe</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+promise_test(async () => {
+ const iframe = document.createElement("iframe");
+ const stream = await navigator.mediaDevices.getDisplayMedia({video: true});
+ const track = stream.getVideoTracks()[0];
+ const cloned_track = track.clone();
+ const iframeLoaded = new Promise((resolve) => {iframe.onload = resolve});
+
+ iframe.src = "support/iframe-MediaStreamTrack-transfer.html";
+ document.body.appendChild(iframe);
+
+ await iframeLoaded;
+
+ const nextMessage = new Promise((resolve) => {
+ window.onmessage = resolve
+ });
+
+ assert_not_equals(track.readyState, "ended");
+  iframe.contentWindow.postMessage(track, '*', [track]);
+ assert_equals(track.readyState, "ended");
+ assert_equals(cloned_track.readyState, "live");
+
+ const message = await nextMessage;
+ assert_not_equals(message.data.result, 'Failure', 'Failed: ' + message.data.error);
+});
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-init.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-init.https.html
new file mode 100644
index 0000000000..54ebf049e8
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-init.https.html
@@ -0,0 +1,39 @@
+<!doctype html>
+<html>
+<head>
+<title>getUserMedia({video:true}) creates a stream with a properly initialized video track</title>
+<link rel="author" title="Dominique Hazael-Massieux" href="mailto:dom@w3.org"/>
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#idl-def-MediaStreamTrack">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#life-cycle-and-media-flow">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStreamTrack-kind">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStreamTrack-enabled">
+<link rel="help" href="http://dev.w3.org/2011/webrtc/editor/getusermedia.html#widl-MediaStreamTrack-readyState">
+</head>
+<body>
+<p class="instructions">When prompted, accept to share your video stream.</p>
+<h1 class="instructions">Description</h1>
+<p class="instructions">This test checks that the video track of MediaStream
+object returned by the success callback in getUserMedia is correctly initialized.</p>
+
+<div id='log'></div>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script>
+promise_test(async () => {
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ const videoTracks = stream.getVideoTracks();
+ assert_equals(videoTracks.length, 1, "There is exactly one video track in the media stream");
+  const track = videoTracks[0];
+ assert_equals(track.readyState, "live", "The track object is in live state");
+ assert_equals(track.kind, "video", "The track object is of video kind");
+ // Not clear that this is required by the spec,
+ // see https://www.w3.org/Bugs/Public/show_bug.cgi?id=22212
+  assert_true(track.enabled, "The track object is enabled");
+});
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-transfer-video.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-transfer-video.https.html
new file mode 100644
index 0000000000..f38768a472
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-transfer-video.https.html
@@ -0,0 +1,26 @@
+<!doctype html>
+<title>MediaStreamTrack transfer to iframe</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+promise_test(async () => {
+ const iframe = document.createElement("iframe");
+ const stream = await navigator.mediaDevices.getUserMedia({video: true});
+ const track = stream.getVideoTracks()[0];
+ const result = new Promise((resolve, reject) => {
+ window.onmessage = (e) => {
+ if (e.data.result === 'Failure') {
+ reject('Failed: ' + e.data.error);
+ } else {
+ resolve();
+ }
+ };
+ });
+ iframe.addEventListener("load", () => {
+    iframe.contentWindow.postMessage(track, '*', [track]);
+ });
+ iframe.src = "support/iframe-MediaStreamTrack-transfer-video.html";
+ document.body.appendChild(iframe);
+ return result;
+});
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-transfer.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-transfer.https.html
new file mode 100644
index 0000000000..e110b4b372
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrack-transfer.https.html
@@ -0,0 +1,50 @@
+<!doctype html>
+<title>MediaStreamTrack transfer to Worker</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=/resources/testdriver.js></script>
+<script src=/resources/testdriver-vendor.js></script>
+<script src=permission-helper.js></script>
+<script id="workerCode" type="javascript/worker">
+self.onmessage = (e) => {
+ try {
+ if(e.data instanceof MediaStreamTrack) {
+ self.postMessage({result: 'Success'});
+ return;
+ } else {
+ self.postMessage({
+ result: 'Failure',
+ error: `${e.data} is not a MediaStreamTrack`
+ });
+ }
+ } catch (error) {
+ self.postMessage({
+ result: 'Failure',
+ error
+ });
+ }
+}
+</script>
+<script>
+promise_test(async () => {
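+  // Build a dedicated worker from the inline "workerCode" script block above,
+  // so the test stays self-contained in this file.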
+ const workerBlob = new Blob([document.querySelector('#workerCode').textContent],
+ {type: "text/javascript"});
+ const workerUrl = window.URL.createObjectURL(workerBlob);
+ const worker = new Worker(workerUrl);
+ window.URL.revokeObjectURL(workerUrl);
+ await setMediaPermission("granted", ["camera"]);
+ const stream = await navigator.mediaDevices.getDisplayMedia({video: true});
+ const track = stream.getVideoTracks()[0];
+ const result = new Promise((resolve, reject) => {
+ worker.onmessage = (e) => {
+ if (e.data.result === 'Failure') {
+ reject('Failed: ' + e.data.error);
+ } else {
+ resolve();
+ }
+ };
+ });
+ worker.postMessage(track, [track]);
+ return result;
+});
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/MediaStreamTrackEvent-constructor.https.html b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrackEvent-constructor.https.html
new file mode 100644
index 0000000000..4946cd71d8
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/MediaStreamTrackEvent-constructor.https.html
@@ -0,0 +1,42 @@
+<!doctype html>
+<title>MediaStreamTrackEvent constructor</title>
+<link rel="help" href="https://w3c.github.io/mediacapture-main/#mediastreamtrackevent">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+test(function() {
+ assert_equals(MediaStreamTrackEvent.length, 2);
+ assert_throws_js(TypeError, function() {
+ new MediaStreamTrackEvent("type");
+ });
+ assert_throws_js(TypeError, function() {
+ new MediaStreamTrackEvent("type", null);
+ });
+ assert_throws_js(TypeError, function() {
+ new MediaStreamTrackEvent("type", undefined);
+ });
+}, "The eventInitDict argument is required");
+
+test(function() {
+ assert_throws_js(TypeError, function() {
+ new MediaStreamTrackEvent("type", {});
+ });
+ assert_throws_js(TypeError, function() {
+ new MediaStreamTrackEvent("type", { track: null });
+ });
+ assert_throws_js(TypeError, function() {
+ new MediaStreamTrackEvent("type", { track: undefined });
+ });
+}, "The eventInitDict's track member is required.");
+
+test(function() {
+  // A MediaStreamTrack instance is needed for this test; any instance will do.
+ var context = new AudioContext();
+ var dest = context.createMediaStreamDestination();
+ var track = dest.stream.getTracks()[0];
+ assert_true(track instanceof MediaStreamTrack);
+ var event = new MediaStreamTrackEvent("type", { track: track });
+ assert_equals(event.type, "type");
+ assert_equals(event.track, track);
+}, "The MediaStreamTrackEvent instance's track attribute is set.");
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/crashtests/enumerateDevices-after-discard-1.https.html b/testing/web-platform/tests/mediacapture-streams/crashtests/enumerateDevices-after-discard-1.https.html
new file mode 100644
index 0000000000..d1f4bab145
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/crashtests/enumerateDevices-after-discard-1.https.html
@@ -0,0 +1,18 @@
+<html class="test-wait">
+<head>
+ <title>
+    Test enumerateDevices() calls on either side of a browsing context discard
+ </title>
+</head>
+<script>
+ const frame = document.createElement('frame');
+ document.documentElement.appendChild(frame);
+ const devices = frame.contentWindow.navigator.mediaDevices;
+ devices.enumerateDevices();
+ frame.remove();
+ devices.enumerateDevices();
+ // Wait long enough to expect the async enumerateDevices() code to complete.
+ navigator.mediaDevices.enumerateDevices().then(
+ () => document.documentElement.removeAttribute("class"));
+</script>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/enumerateDevices-with-navigation.https.html b/testing/web-platform/tests/mediacapture-streams/enumerateDevices-with-navigation.https.html
new file mode 100644
index 0000000000..d951c1cdff
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/enumerateDevices-with-navigation.https.html
@@ -0,0 +1,77 @@
+<!doctype html>
+<title>enumerateDevices() with navigation</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<body></body>
+<script>
+'use strict';
+const blank_url = '/common/blank.html';
+const search2 = '?2';
+
+function promise_new_task(t) {
+ return new Promise(resolve => t.step_timeout(resolve, 0));
+}
+function promise_event(target, name) {
+ return new Promise(resolve => target[`on${name}`] = resolve);
+}
+
+promise_test(async t => {
+  // Gecko persists only top-level documents, so load documents in a top-level window.
+ await test_driver.bless('window.open()');
+ const proxy = window.open(blank_url);
+ t.add_cleanup(() => proxy.close());
+ await promise_event(proxy, 'pageshow');
+ const devices = proxy.navigator.mediaDevices;
+ // Use another task so that another load creates a new session history entry.
+ await promise_new_task(t);
+
+ proxy.location = blank_url + search2;
+ await promise_event(proxy, 'pagehide');
+ // Use another task to ensure the first subdocument is no longer fully
+ // active and proxy refers to the realm of the second document.
+ await promise_new_task(t);
+ assert_equals(proxy.location.search, search2, 'navigated search');
+ // Enumerate from the inactive first Window.
+ const promise_enumerate = devices.enumerateDevices();
+ // `then()` is used rather than static Promise methods because microtasks
+ // for `PromiseResolve()` do not run when Promises from inactive realms are
+ // involved. Whether microtasks for `then()` run depends on the realm of
+ // the handler rather than the realm of the Promise.
+ // Don't use `finally()`, because it uses `PromiseResolve()` and so
+ // microtasks don't run.
+ // See https://github.com/whatwg/html/issues/5319.
+ let promise_state = 'pending';
+ promise_enumerate.then(() => promise_state = 'resolved',
+ () => promise_state = 'rejected');
+ // Enumerate in the active second Window to provide enough time to check
+ // that the Promise from the inactive Window does not settle.
+ await proxy.navigator.mediaDevices.enumerateDevices();
+
+ proxy.history.back();
+ await promise_event(proxy, 'pagehide');
+ // enumerateDevices() Promise resolution is triggered only in parallel
+ // steps, so manipulation of the Promise (if the first document was
+ // persisted) would occur through a queued task, which would run after
+ // the pagehide event is dispatched and so after the associated
+ // microtask that runs the following assert.
+ // https://html.spec.whatwg.org/multipage/webappapis.html#event-loop-for-spec-authors
+ assert_equals(promise_state, 'pending', 'Promise state while inactive');
+ // If the first document is restored, then that will occur immediately after
+ // pagehide (and associated microtasks), before the next global task is run.
+ // https://html.spec.whatwg.org/multipage/history.html#traverse-the-history-by-a-delta
+ await promise_new_task(t);
+ if (proxy.navigator.mediaDevices == devices) {
+ // The first document was persisted and restored.
+ assert_equals(proxy.location.search, '', 'history search');
+ await promise_enumerate;
+ } else {
+ // The first document was not restored, but gets re-fetched.
+ await t.step_wait(() => proxy.location.search == '', 'navigation');
+ assert_not_equals(proxy.navigator.mediaDevices, devices, 'new realm')
+ await proxy.navigator.mediaDevices.enumerateDevices();
+ assert_equals(promise_state, 'pending', 'Promise state after discard');
+ }
+}, 'enumerateDevices with navigation');
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/historical.https.html b/testing/web-platform/tests/mediacapture-streams/historical.https.html
new file mode 100644
index 0000000000..84326cec0a
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/historical.https.html
@@ -0,0 +1,33 @@
+<!doctype html>
+<title>Historical Media Capture and Streams features</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id="log"></div>
+<script>
+test(function() {
+ assert_false("webkitMediaStream" in window);
+}, "webkitMediaStream interface should not exist");
+
+test(function() {
+ assert_false("webkitGetUserMedia" in navigator);
+}, "navigator.webkitGetUserMedia should not exist");
+
+test(function() {
+ assert_false("mozGetUserMedia" in navigator);
+}, "navigator.mozGetUserMedia should not exist");
+
+test(() => {
+ const mediaStream = new MediaStream();
+ assert_throws_js(TypeError, () => URL.createObjectURL(mediaStream));
+}, "Passing MediaStream to URL.createObjectURL() should throw");
+
+test(() => {
+ const mediaStream = new MediaStream();
+ assert_false("onactive" in mediaStream);
+}, "MediaStream.onactive should not exist");
+
+test(() => {
+ const mediaStream = new MediaStream();
+ assert_false("oninactive" in mediaStream);
+}, "MediaStream.oninactive should not exist");
+</script>
diff --git a/testing/web-platform/tests/mediacapture-streams/idlharness.https.window.js b/testing/web-platform/tests/mediacapture-streams/idlharness.https.window.js
new file mode 100644
index 0000000000..e8e3cdab21
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/idlharness.https.window.js
@@ -0,0 +1,50 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+// META: timeout=long
+
+'use strict';
+
+// https://w3c.github.io/mediacapture-main/
+
+idl_test(
+ ['mediacapture-streams'],
+ ['webidl', 'dom', 'html'],
+ async idl_array => {
+ const inputDevices = [];
+ const outputDevices = [];
+ try {
+ const list = await navigator.mediaDevices.enumerateDevices();
+ for (const device of list) {
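+      // Keep only the first device of each kind and expose it on the global
+      // object, so add_objects() below can reference it by kind name.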
+ if (device.kind in self) {
+ continue;
+ }
+ assert_in_array(device.kind, ['audioinput', 'videoinput', 'audiooutput']);
+ self[device.kind] = device;
+ if (device.kind.endsWith('input')) {
+ inputDevices.push(device.kind);
+ } else {
+ outputDevices.push(device.kind);
+ }
+ }
+ } catch (e) {}
+
+ try {
+ self.stream = await navigator.mediaDevices.getUserMedia({audio: true});
+ self.track = stream.getTracks()[0];
+ self.trackEvent = new MediaStreamTrackEvent("type", {
+ track: track,
+ });
+ } catch (e) {}
+
+ idl_array.add_objects({
+ InputDeviceInfo: inputDevices,
+ MediaStream: ['stream', 'new MediaStream()'],
+ Navigator: ['navigator'],
+ MediaDevices: ['navigator.mediaDevices'],
+ MediaDeviceInfo: outputDevices,
+ MediaStreamTrack: ['track'],
+ MediaStreamTrackEvent: ['trackEvent'],
+ OverconstrainedError: ['new OverconstrainedError("constraint")'],
+ });
+ }
+);
diff --git a/testing/web-platform/tests/mediacapture-streams/iframe-enumerate-cleared.html b/testing/web-platform/tests/mediacapture-streams/iframe-enumerate-cleared.html
new file mode 100644
index 0000000000..27dd046ac5
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/iframe-enumerate-cleared.html
@@ -0,0 +1,2 @@
+<!DOCTYPE html>
+<script src="message-enumerateddevices.js"></script>
diff --git a/testing/web-platform/tests/mediacapture-streams/iframe-enumerate-cleared.html.headers b/testing/web-platform/tests/mediacapture-streams/iframe-enumerate-cleared.html.headers
new file mode 100644
index 0000000000..ac4ddd8aba
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/iframe-enumerate-cleared.html.headers
@@ -0,0 +1 @@
+Clear-Site-Data: "cookies"
diff --git a/testing/web-platform/tests/mediacapture-streams/iframe-enumerate.html b/testing/web-platform/tests/mediacapture-streams/iframe-enumerate.html
new file mode 100644
index 0000000000..27dd046ac5
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/iframe-enumerate.html
@@ -0,0 +1,2 @@
+<!DOCTYPE html>
+<script src="message-enumerateddevices.js"></script>
diff --git a/testing/web-platform/tests/mediacapture-streams/message-enumerateddevices.js b/testing/web-platform/tests/mediacapture-streams/message-enumerateddevices.js
new file mode 100644
index 0000000000..4541636b4b
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/message-enumerateddevices.js
@@ -0,0 +1,8 @@
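+// Runs in the embedded (i)frame test pages: on a message from the parent,
+// briefly open an audio+video capture, stop its tracks, then post the
+// enumerated devices back to the message sender.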
+onmessage = async e => {
+ const stream = await navigator.mediaDevices.getUserMedia({audio: true, video: true});
+ stream.getTracks().forEach(t => t.stop());
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ e.source.postMessage({
+ devices: devices.map(d => d.toJSON())
+ }, '*');
+}
diff --git a/testing/web-platform/tests/mediacapture-streams/parallel-capture-requests.https.html b/testing/web-platform/tests/mediacapture-streams/parallel-capture-requests.https.html
new file mode 100644
index 0000000000..301515d1bd
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/parallel-capture-requests.https.html
@@ -0,0 +1,57 @@
+<!doctype html>
+<html>
+<head>
+<title>Parallel capture requests</title>
+</head>
+<body>
+<button id="button">User gesture</button>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script>
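+// getDisplayMedia() requires transient user activation, so the wrapper below
+// routes each call through a test_driver-generated click on the button above.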
+async function getDisplayMedia(constraints) {
+ const p = new Promise(r => button.onclick = r);
+ await test_driver.click(button);
+ await p;
+ return navigator.mediaDevices.getDisplayMedia(constraints);
+}
+
+promise_test(function() {
+ const getUserMediaPromise =
+ navigator.mediaDevices.getUserMedia({audio: true, video:true});
+ const getDisplayMediaPromise =
+ getDisplayMedia({video: true, audio: true});
+ return Promise.all([getUserMediaPromise, getDisplayMediaPromise])
+ .then(function(s) {
+ assert_greater_than_equal(s[0].getTracks().length, 1);
+ assert_less_than_equal(s[0].getTracks().length, 2);
+ assert_equals(s[0].getVideoTracks().length, 1);
+ assert_less_than_equal(s[0].getAudioTracks().length, 1);
+ assert_greater_than_equal(s[1].getTracks().length, 1);
+ assert_less_than_equal(s[1].getTracks().length, 2);
+ assert_equals(s[1].getVideoTracks().length, 1);
+ assert_less_than_equal(s[1].getAudioTracks().length, 1);
+ });
+}, 'getDisplayMedia() and parallel getUserMedia()');
+
+promise_test(function() {
+ const getDisplayMediaPromise =
+ getDisplayMedia({video: true, audio: true});
+ const getUserMediaPromise =
+ navigator.mediaDevices.getUserMedia({audio: true, video:true});
+ return Promise.all([getDisplayMediaPromise, getUserMediaPromise])
+ .then(function(s) {
+ assert_greater_than_equal(s[0].getTracks().length, 1);
+ assert_less_than_equal(s[0].getTracks().length, 2);
+ assert_equals(s[0].getVideoTracks().length, 1);
+ assert_less_than_equal(s[0].getAudioTracks().length, 1);
+ assert_greater_than_equal(s[1].getTracks().length, 1);
+ assert_less_than_equal(s[1].getTracks().length, 2);
+ assert_equals(s[1].getVideoTracks().length, 1);
+ assert_less_than_equal(s[1].getAudioTracks().length, 1);
+ });
+}, 'getUserMedia() and parallel getDisplayMedia()');
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/permission-helper.js b/testing/web-platform/tests/mediacapture-streams/permission-helper.js
new file mode 100644
index 0000000000..0a237f7d43
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/permission-helper.js
@@ -0,0 +1,24 @@
+// Set permissions for camera and microphone using WebDriver.
+// Status can be one of "granted" or "denied".
+// Scope takes its values from permission names.
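+//
+// Example usage (mirroring calls made elsewhere in this test suite):
+//   await setMediaPermission();                      // grant camera and microphone
+//   await setMediaPermission("granted", ["camera"]); // grant camera only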
+async function setMediaPermission(status="granted", scope=["camera", "microphone"]) {
+ try {
+ for (let s of scope) {
+ await test_driver.set_permission({ name: s }, status);
+ }
+ } catch (e) {
+ const noSetPermissionSupport = typeof e === "string" && e.match(/set_permission not implemented/);
+ if (!(noSetPermissionSupport ||
+ (e instanceof Error && e.message.match("unimplemented")) )) {
+ throw e;
+ }
+    // WebDriver does not implement the set_permission action.
+ // FF: https://bugzilla.mozilla.org/show_bug.cgi?id=1524074
+
+    // With current WPT runners, permissions default to the granted state for
+    // Firefox and Safari; if status != "granted" was requested, flag the test
+    // so its results are not reported as valid.
+ if (status === "denied") {
+ assert_implements_optional(!noSetPermissionSupport, "Unable to set permission to denied for this test");
+ }
+ }
+}
diff --git a/testing/web-platform/tests/mediacapture-streams/support/iframe-MediaStreamTrack-transfer-video.html b/testing/web-platform/tests/mediacapture-streams/support/iframe-MediaStreamTrack-transfer-video.html
new file mode 100644
index 0000000000..9f37ba0ffa
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/support/iframe-MediaStreamTrack-transfer-video.html
@@ -0,0 +1,27 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>iframe</title>
+ <script>
+ function onMsg(e) {
+ if(e.data instanceof MediaStreamTrack) {
+ const track = e.data;
+          const video = document.getElementById("myvideo");
+          video.srcObject = new MediaStream([track]);
+ video.play();
+
+ parent.postMessage({result: 'Success'});
+ } else {
+ parent.postMessage({
+ result: 'Failure',
+ error: `${e.data} is not a MediaStreamTrack`
+ });
+ }
+ }
+ window.addEventListener("message", onMsg);
+ </script>
+ </head>
+ <body>
+ <video id="myvideo"></video>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/mediacapture-streams/support/iframe-MediaStreamTrack-transfer.html b/testing/web-platform/tests/mediacapture-streams/support/iframe-MediaStreamTrack-transfer.html
new file mode 100644
index 0000000000..e8d6aac647
--- /dev/null
+++ b/testing/web-platform/tests/mediacapture-streams/support/iframe-MediaStreamTrack-transfer.html
@@ -0,0 +1,21 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>iframe</title>
+ <script>
+ function onMsg(e) {
+ if(e.data instanceof MediaStreamTrack) {
+ parent.postMessage({result: 'Success'});
+ } else {
+ parent.postMessage({
+ result: 'Failure',
+ error: `${e.data} is not a MediaStreamTrack`
+ });
+ }
+ }
+ window.addEventListener("message", onMsg);
+ </script>
+ </head>
+ <body>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/mediasession/META.yml b/testing/web-platform/tests/mediasession/META.yml
new file mode 100644
index 0000000000..8eb7fa0261
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/META.yml
@@ -0,0 +1,3 @@
+spec: https://w3c.github.io/mediasession/
+suggested_reviewers:
+ - mounirlamouri
diff --git a/testing/web-platform/tests/mediasession/README.md b/testing/web-platform/tests/mediasession/README.md
new file mode 100644
index 0000000000..7c7c9f8d57
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/README.md
@@ -0,0 +1,20 @@
+# Media Session Specification Tests
+
+The Media Session specification is available here: https://wicg.github.io/mediasession
+
+GitHub repository: https://github.com/WICG/mediasession
+
+File an issue: https://github.com/WICG/mediasession/issues/new
+
+## Device/system dependency
+
+The Media Session specification defines behaviors related to device or system
+features, such as the actions to run when a software or hardware media key is
+pressed. These behaviors are not tested here because they depend on
+user-agent-specific implementations and device-specific features.
+
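+For context, the untested behaviors hang off handlers registered through
+`MediaSession.setActionHandler()`. The sketch below is illustrative only (it is
+not part of this test suite, and the handler body is an assumed typical usage):
+
+```js
+// Hypothetical page code: ask the user agent to route the platform "play"
+// media key to this handler instead of applying its default behavior.
+navigator.mediaSession.setActionHandler("play", () => {
+  // A real page would resume its own <audio>/<video> element here, e.g.:
+  document.querySelector("video").play();
+});
+```
+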
+## Status of these tests
+
+These tests are not complete and only reflect the Blink tests that could be
+exported. If a reader finds a behavior that could be tested but is not, they
+should feel free to file a bug.
diff --git a/testing/web-platform/tests/mediasession/helper/artwork-generator.html b/testing/web-platform/tests/mediasession/helper/artwork-generator.html
new file mode 100644
index 0000000000..5a2fbb151d
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/helper/artwork-generator.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<title>MediaImage</title>
+<script>
+function createArtworkFromURLs(sources) {
+ let artwork = [];
+ for (const source of sources) {
+ artwork.push({
+ src: source
+ });
+ }
+
+ let metadata = new MediaMetadata({
+ artwork: artwork
+ });
+ return metadata.artwork;
+}
+
+</script>
diff --git a/testing/web-platform/tests/mediasession/idlharness.window.js b/testing/web-platform/tests/mediasession/idlharness.window.js
new file mode 100644
index 0000000000..e4d914544e
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/idlharness.window.js
@@ -0,0 +1,18 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+
+// https://w3c.github.io/mediasession/
+
+'use strict';
+
+idl_test(
+ ['mediasession'],
+ ['html'],
+ idl_array => {
+ idl_array.add_objects({
+ MediaMetadata: ['new MediaMetadata()'],
+ MediaSession: ['navigator.mediaSession'],
+ Navigator: ['navigator']
+ });
+ }
+);
diff --git a/testing/web-platform/tests/mediasession/mediametadata.html b/testing/web-platform/tests/mediasession/mediametadata.html
new file mode 100644
index 0000000000..ff00e54ee7
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/mediametadata.html
@@ -0,0 +1,219 @@
+<!DOCTYPE html>
+<title>MediaMetadata interface</title>
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+function load_iframe(src) {
+ return new Promise(resolve => {
+ const iframe = document.createElement('iframe');
+ iframe.onload = () => { resolve(iframe); };
+ iframe.src = src;
+ iframe.style.display = 'none';
+ document.documentElement.appendChild(iframe);
+ });
+}
+
+test(function() {
+ var metadata = new MediaMetadata({});
+ navigator.mediaSession.metadata = metadata;
+ assert_equals(navigator.mediaSession.metadata, metadata);
+}, "Test that mediaSession.metadata is properly set");
+
+test(function() {
+ var metadata = new MediaMetadata({});
+ navigator.mediaSession.metadata = metadata;
+ metadata.title = 'foo';
+ assert_equals(navigator.mediaSession.metadata.title, 'foo');
+}, "Test that changes to metadata propagate properly");
+
+test(function() {
+ var metadata = new MediaMetadata({});
+ navigator.mediaSession.metadata = metadata;
+ navigator.mediaSession.metadata = null;
+ assert_equals(navigator.mediaSession.metadata, null);
+}, "Test that resetting metadata to null is reflected");
+
+test(function() {
+ var metadata = new MediaMetadata({});
+ assert_not_equals(metadata, null);
+
+ assert_throws_js(TypeError, _ => new MediaMetadata('foobar'));
+ assert_throws_js(TypeError, _ => new MediaMetadata(42));
+}, 'Test that MediaMetadata is constructed using a dictionary');
+
+test(function() {
+ var metadata = new MediaMetadata();
+ assert_not_equals(metadata, null);
+}, "Test that MediaMetadata constructor can take no parameter");
+
+test(function() {
+ var image1 = { src: 'http://example.com/1', sizes: 'sizes1', type: 'type1' };
+ var image2 = { src: 'http://example.com/2', sizes: 'sizes2', type: 'type2' };
+ var metadata = new MediaMetadata({
+ title: 'foo', album: 'bar', artist: 'plop', artwork: [ image1, image2 ]
+ });
+
+ assert_equals(metadata.title, 'foo');
+ assert_equals(metadata.album, 'bar');
+ assert_equals(metadata.artist, 'plop');
+ assert_equals(metadata.artwork.length, 2);
+ assert_equals(metadata.artwork[0].src, image1.src);
+ assert_equals(metadata.artwork[0].sizes, image1.sizes);
+ assert_equals(metadata.artwork[0].type, image1.type);
+ assert_equals(metadata.artwork[1].src, image2.src);
+ assert_equals(metadata.artwork[1].sizes, image2.sizes);
+ assert_equals(metadata.artwork[1].type, image2.type);
+}, 'Test the different values allowed in MediaMetadata init dictionary');
+
+test(function() {
+ var metadata = new MediaMetadata({});
+ assert_equals(metadata.title, '');
+ assert_equals(metadata.artist, '');
+ assert_equals(metadata.album, '');
+ assert_equals(0, metadata.artwork.length);
+}, 'Test the default values for MediaMetadata with empty init dictionary');
+
+test(function() {
+ var metadata = new MediaMetadata();
+ assert_equals(metadata.title, '');
+ assert_equals(metadata.artist, '');
+ assert_equals(metadata.album, '');
+ assert_equals(0, metadata.artwork.length);
+}, 'Test the default values for MediaMetadata with no init dictionary');
+
+test(function() {
+ var metadata = new MediaMetadata({ randomValueThatWillNotBeAdded: '... hopefully ;)' });
+ assert_equals(metadata.randomValueThatWillNotBeAdded, undefined);
+}, 'Test that passing unknown values to the dictionary is a no-op');
+
+test(function() {
+ var image1 = { src: 'http://example.com/1', sizes: 'sizes1', type: 'type1' };
+ var image2 = { src: 'http://example.com/2', sizes: 'sizes2', type: 'type2' };
+ var metadata = new MediaMetadata({
+ title: 'foo', album: 'bar', artist: 'plop', artwork: [ image1, image2 ]
+ });
+
+ metadata.title = 'something else';
+ assert_equals(metadata.title, 'something else');
+
+ metadata.album = 'other value';
+ assert_equals(metadata.album, 'other value');
+
+ metadata.artist = 'someone else';
+ assert_equals(metadata.artist, 'someone else');
+
+ var image = { src: 'http://example.com/', sizes: '40x40', type: 'image/png' };
+ metadata.artwork = [ image ];
+ assert_equals(metadata.artwork.length, 1);
+ assert_equals(metadata.artwork[0].src, 'http://example.com/');
+ assert_equals(metadata.artwork[0].sizes, '40x40');
+ assert_equals(metadata.artwork[0].type, 'image/png');
+}, "Test that MediaMetadata is read/write");
+
+test(function() {
+ var metadata = new MediaMetadata({ artwork: [ { src: 'http://foo.com/' } ] });
+ assert_throws_js(TypeError, _ => {
+ metadata.artwork.push({
+ src: 'http://example.com/', sizes: '40x40', type: 'image/png',
+ });
+ });
+
+ metadata.artwork[0].src = 'bar';
+ assert_equals(metadata.artwork[0].src, 'http://foo.com/');
+}, "Test that MediaMetadat.artwork can't be modified");
+
+test(function() {
+ var metadata = new MediaMetadata({ artwork: [{
+ src: 'http://example.com/', sizes: '40x40', type: 'image/png',
+ some_other_value: 'foo',
+ }]});
+ assert_equals(metadata.artwork[0].src, 'http://example.com/');
+ assert_equals(metadata.artwork[0].sizes, '40x40');
+ assert_equals(metadata.artwork[0].type, 'image/png');
+ assert_false('some_other_value' in metadata.artwork[0]);
+
+ metadata.artwork[0].something_else = 'bar';
+ assert_false('something_else' in metadata.artwork[0]);
+}, "Test that MediaMetadata.artwork will not expose unknown properties");
+
+test(function() {
+ var metadata = new MediaMetadata({ artwork: [
+ { src: 'http://example.com/1', sizes: '40x40', type: 'image/png' },
+ { src: 'http://example.com/2', sizes: '40x40', type: 'image/png' },
+ ]});
+
+ assert_true(Object.isFrozen(metadata.artwork));
+ for (var i = 0; i < metadata.artwork.length; ++i)
+ assert_true(Object.isFrozen(metadata.artwork[i]));
+}, "Test that MediaMetadata.artwork is Frozen");
+
+test(function() {
+ var metadata = new MediaMetadata({ artwork: [
+ { src: 'http://example.com', sizes: '40x40', type: 'image/png' },
+ { src: '../foo', sizes: '40x40', type: 'image/png' },
+ { src: '/foo/bar', sizes: '40x40', type: 'image/png' },
+ ]});
+
+ assert_equals(metadata.artwork[0].src, new URL('http://example.com', document.URL).href)
+ assert_equals(metadata.artwork[1].src, new URL('../foo', document.URL).href)
+ assert_equals(metadata.artwork[2].src, new URL('/foo/bar', document.URL).href)
+}, "Test that MediaMetadata.artwork returns parsed urls");
+
+test(function() {
+ var metadata = 42;
+
+ assert_throws_js(TypeError, _ => {
+    metadata = new MediaMetadata({ artwork: [ { src: 'http://[example.com]' }] });
+ });
+ assert_equals(metadata, 42);
+
+ metadata = new MediaMetadata();
+ assert_throws_js(TypeError, _ => {
+ metadata.artwork = [
+ // Valid url.
+ { src: 'http://example.com' },
+ // Invalid url.
+ { src: 'http://example.com:demo' },
+ ];
+ });
+ assert_equals(metadata.artwork.length, 0);
+
+}, "Test that MediaMetadata throws when setting an invalid url");
+
+test(function() {
+ var metadata = new MediaMetadata({ artwork: [ { src: 'foo.jpg' } ] });
+ assert_equals(metadata.artwork[0].type, '');
+ assert_equals(metadata.artwork[0].sizes, '');
+}, "Test MediaImage default values");
+
+test(function() {
+ assert_throws_js(TypeError, _ => {
+ new MediaMetadata({ artwork: [ {} ] });
+ });
+
+ var metadata = new MediaMetadata();
+ assert_throws_js(TypeError, _ => {
+ metadata.artwork = [ { type: 'image/png', sizes: '40x40' } ];
+ });
+}, "Test that MediaImage.src is required")
+
+promise_test(async t => {
+ const URLs = [
+ 'http://example.com',
+ '../foo',
+ './foo/bar',
+ '/foo/bar',
+ ];
+ const subframe = await load_iframe('helper/artwork-generator.html');
+ // createArtworkFromURLs is a function in the subframe.
+ const artwork = subframe.contentWindow.createArtworkFromURLs(URLs);
+
+ assert_equals(artwork.length, URLs.length);
+ for (let i = 0 ; i < artwork.length ; ++i) {
+ assert_equals(artwork[i].src, new URL(URLs[i], document.URL).href);
+ }
+}, 'Test that the base URL of MediaImage is the base URL of the entry settings object');
+
+</script>
diff --git a/testing/web-platform/tests/mediasession/playbackstate.html b/testing/web-platform/tests/mediasession/playbackstate.html
new file mode 100644
index 0000000000..63f3548d94
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/playbackstate.html
@@ -0,0 +1,27 @@
+<!DOCTYPE html>
+<title>MediaSession.playbackState attribute</title>
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+test(function() {
+ assert_equals(window.navigator.mediaSession.playbackState, "none");
+}, 'Test that playbackState is initialized as "none"');
+
+test(function() {
+ var states = [ "paused", "playing", "none" ];
+ for (let state of states) {
+ window.navigator.mediaSession.playbackState = state;
+ assert_equals(window.navigator.mediaSession.playbackState, state);
+ }
+}, 'Test that playbackState is read/write');
+
+test(function() {
+ var invalidStates = [ "invalid", "" ];
+ for (let state of invalidStates) {
+ window.navigator.mediaSession.playbackState = state;
+ assert_equals(window.navigator.mediaSession.playbackState, "none");
+ }
+}, 'Test that setting an invalid playbackState value is ignored');
+
+</script>
diff --git a/testing/web-platform/tests/mediasession/positionstate.html b/testing/web-platform/tests/mediasession/positionstate.html
new file mode 100644
index 0000000000..9141091a90
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/positionstate.html
@@ -0,0 +1,106 @@
+<!DOCTYPE html>
+<title>MediaSession.setPositionState</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+test(() => {
+ window.navigator.mediaSession.setPositionState(null);
+}, 'Test setPositionState with a null value');
+
+test(() => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 60.9,
+ position: 10.1,
+ playbackRate: 1.5
+ });
+}, 'Test setPositionState with a valid value for forward playback');
+
+test(() => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 60.9,
+ playbackRate: 1.0
+ });
+}, 'Test setPositionState with optional position');
+
+test(() => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 60.9,
+ position: 10.1
+ });
+}, 'Test setPositionState with optional playback rate');
+
+test(() => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 60.9
+ });
+}, 'Test setPositionState with only duration');
+
+test(() => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 0
+ });
+}, 'Test setPositionState with zero duration');
+
+test(() => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 60.9,
+ position: 10.1,
+ playbackRate: -2.0
+ });
+}, 'Test setPositionState with negative playback rate');
+
+test(() => {
+ assert_throws_js(
+ TypeError,
+ _ => {
+ window.navigator.mediaSession.setPositionState({
+ duration: -1
+ });
+ });
+}, 'Test setPositionState throws a TypeError if duration is negative');
+
+test(() => {
+ assert_throws_js(
+ TypeError,
+ _ => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 10,
+ position: -1
+ });
+ });
+}, 'Test setPositionState throws a TypeError if position is negative');
+
+test(() => {
+ assert_throws_js(
+ TypeError,
+ _ => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 10,
+ position: 20
+ });
+ });
+}, 'Test setPositionState throws a TypeError if duration is less than position');
+
+test(() => {
+ assert_throws_js(
+ TypeError,
+ _ => {
+ window.navigator.mediaSession.setPositionState({
+ duration: 60.9,
+ position: 10.1,
+ playbackRate: 0.0
+ });
+ });
+}, 'Test setPositionState throws a TypeError if playback rate is zero');
+
+test(() => {
+ assert_throws_js(
+ TypeError,
+ _ => {
+ window.navigator.mediaSession.setPositionState({
+ position: 10.1,
+ playbackRate: 1.0
+ });
+ });
+}, 'Test setPositionState throws a TypeError if duration is not specified');
+</script>
diff --git a/testing/web-platform/tests/mediasession/setactionhandler.html b/testing/web-platform/tests/mediasession/setactionhandler.html
new file mode 100644
index 0000000000..a9df51968c
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/setactionhandler.html
@@ -0,0 +1,34 @@
+<!DOCTYPE html>
+<title>Test that setting MediaSession event handler should notify the service</title>
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+[
+ "play",
+ "pause",
+ "previoustrack",
+ "nexttrack",
+ "seekbackward",
+ "seekforward",
+ "stop",
+ "seekto",
+ "skipad",
+ "togglemicrophone",
+ "togglecamera",
+ "hangup",
+ "previousslide",
+ "nextslide",
+].forEach((action) =>
+ test((t) => {
+ window.navigator.mediaSession.setActionHandler(action, null);
+ }, `Test that setActionHandler("${action}") succeeds`)
+);
+
+test(function(t) {
+ assert_throws_js(
+ TypeError,
+ _ => { window.navigator.mediaSession.setActionHandler("invalid", null); });
+}, "Test that setActionHandler() throws exception for unsupported actions");
+
+</script>
diff --git a/testing/web-platform/tests/mediasession/setcameraactive.html b/testing/web-platform/tests/mediasession/setcameraactive.html
new file mode 100644
index 0000000000..d5cd6a93f1
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/setcameraactive.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<title>MediaSession.setCameraActive</title>
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+test(function(t) {
+ window.navigator.mediaSession.setCameraActive(true);
+ window.navigator.mediaSession.setCameraActive(false);
+}, "Test that setCameraActive() can be executed for boolean values");
+
+</script>
diff --git a/testing/web-platform/tests/mediasession/setmicrophoneactive.html b/testing/web-platform/tests/mediasession/setmicrophoneactive.html
new file mode 100644
index 0000000000..fbb4c612d9
--- /dev/null
+++ b/testing/web-platform/tests/mediasession/setmicrophoneactive.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<title>MediaSession.setMicrophoneActive</title>
+<script src=/resources/testharness.js></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+
+test(function(t) {
+ window.navigator.mediaSession.setMicrophoneActive(true);
+ window.navigator.mediaSession.setMicrophoneActive(false);
+}, "Test that setMicrophoneActive() can be executed for boolean values");
+
+</script>