summary refs log tree commit diff stats
path: root/dom/media/mediasource
diff options
context:
space:
mode:
Diffstat (limited to 'dom/media/mediasource')
-rw-r--r--dom/media/mediasource/AsyncEventRunner.h32
-rw-r--r--dom/media/mediasource/ContainerParser.cpp767
-rw-r--r--dom/media/mediasource/ContainerParser.h97
-rw-r--r--dom/media/mediasource/MediaSource.cpp698
-rw-r--r--dom/media/mediasource/MediaSource.h182
-rw-r--r--dom/media/mediasource/MediaSourceDecoder.cpp372
-rw-r--r--dom/media/mediasource/MediaSourceDecoder.h101
-rw-r--r--dom/media/mediasource/MediaSourceDemuxer.cpp530
-rw-r--r--dom/media/mediasource/MediaSourceDemuxer.h172
-rw-r--r--dom/media/mediasource/MediaSourceUtils.cpp49
-rw-r--r--dom/media/mediasource/MediaSourceUtils.h20
-rw-r--r--dom/media/mediasource/ResourceQueue.cpp204
-rw-r--r--dom/media/mediasource/ResourceQueue.h88
-rw-r--r--dom/media/mediasource/SourceBuffer.cpp765
-rw-r--r--dom/media/mediasource/SourceBuffer.h207
-rw-r--r--dom/media/mediasource/SourceBufferAttributes.h116
-rw-r--r--dom/media/mediasource/SourceBufferList.cpp187
-rw-r--r--dom/media/mediasource/SourceBufferList.h110
-rw-r--r--dom/media/mediasource/SourceBufferResource.cpp144
-rw-r--r--dom/media/mediasource/SourceBufferResource.h143
-rw-r--r--dom/media/mediasource/SourceBufferTask.h126
-rw-r--r--dom/media/mediasource/TrackBuffersManager.cpp3092
-rw-r--r--dom/media/mediasource/TrackBuffersManager.h568
-rw-r--r--dom/media/mediasource/gtest/TestContainerParser.cpp148
-rw-r--r--dom/media/mediasource/gtest/TestExtractAV1CodecDetails.cpp290
-rw-r--r--dom/media/mediasource/gtest/TestExtractVPXCodecDetails.cpp141
-rw-r--r--dom/media/mediasource/gtest/moz.build22
-rw-r--r--dom/media/mediasource/moz.build42
-rw-r--r--dom/media/mediasource/test/.eslintrc.js28
-rw-r--r--dom/media/mediasource/test/1516754.webmbin0 -> 1081344 bytes
-rw-r--r--dom/media/mediasource/test/1516754.webm^headers^1
-rw-r--r--dom/media/mediasource/test/aac20-48000-64000-1.m4sbin0 -> 24328 bytes
-rw-r--r--dom/media/mediasource/test/aac20-48000-64000-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/aac20-48000-64000-2.m4sbin0 -> 24132 bytes
-rw-r--r--dom/media/mediasource/test/aac20-48000-64000-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/aac20-48000-64000-init.mp4bin0 -> 1246 bytes
-rw-r--r--dom/media/mediasource/test/aac20-48000-64000-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/aac51-48000-128000-1.m4sbin0 -> 48979 bytes
-rw-r--r--dom/media/mediasource/test/aac51-48000-128000-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/aac51-48000-128000-2.m4sbin0 -> 47727 bytes
-rw-r--r--dom/media/mediasource/test/aac51-48000-128000-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/aac51-48000-128000-init.mp4bin0 -> 634 bytes
-rw-r--r--dom/media/mediasource/test/aac51-48000-128000-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/avc3/init.mp4bin0 -> 687 bytes
-rw-r--r--dom/media/mediasource/test/avc3/init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/avc3/segment1.m4sbin0 -> 696869 bytes
-rw-r--r--dom/media/mediasource/test/avc3/segment1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop1.m4sbin0 -> 24424 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop10.m4sbin0 -> 18279 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop10.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop11.m4sbin0 -> 24607 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop11.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop12.m4sbin0 -> 22676 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop12.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop13.m4sbin0 -> 9847 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop13.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop2.m4sbin0 -> 22205 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop2s.mp4bin0 -> 48024 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop2s.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop3.m4sbin0 -> 24013 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop3.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop4.m4sbin0 -> 23112 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop4.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop5.m4sbin0 -> 18367 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop5.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop6.m4sbin0 -> 24455 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop6.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop7.m4sbin0 -> 22442 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop7.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop8.m4sbin0 -> 24356 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop8.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop9.m4sbin0 -> 23252 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop9.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_300-3s.webmbin0 -> 79429 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_300-3s.webm^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video1.m4sbin0 -> 66806 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video2.m4sbin0 -> 65292 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_480_624kbps-videoinit.mp4bin0 -> 1410 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_480_624kbps-videoinit.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio1.m4sbin0 -> 694 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio10.m4sbin0 -> 879 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio10.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio11.m4sbin0 -> 208 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio11.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio2.m4sbin0 -> 750 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio3.m4sbin0 -> 724 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio3.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio4.m4sbin0 -> 806 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio4.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio5.m4sbin0 -> 822 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio5.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio6.m4sbin0 -> 833 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio6.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio7.m4sbin0 -> 888 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio7.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio8.m4sbin0 -> 829 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio8.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio9.m4sbin0 -> 778 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audio9.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audioinit.mp4bin0 -> 825 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_audioinit.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_dash.mpd48
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.0-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.0-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.0-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.0-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.0-init.mp4bin0 -> 1441 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.0-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.1-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.1-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.1-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.1-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.1-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.1-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.2-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.2-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.2-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.2-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.2-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.2-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.3-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.3-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.3-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.3-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.3-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.3-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.4-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.4-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.4-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.4-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.4-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.4-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.5-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.5-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.5-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.5-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.5-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.5-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.6-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.6-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.6-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.6-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.6-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.6-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.7-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.7-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.7-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.7-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.7-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.7-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.8-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.8-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.8-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.8-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.8-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.8-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.9-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.9-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.9-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.9-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.9-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_0.9-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.0-1.m4sbin0 -> 110108 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.0-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.0-2.m4sbin0 -> 116079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.0-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.0-init.mp4bin0 -> 1453 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.0-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.1-1.m4sbin0 -> 143079 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.1-1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.1-2.m4sbin0 -> 137858 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.1-2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.1-init.mp4bin0 -> 1336 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_offset_1.1-init.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_trailing_skip_box_video1.m4sbin0 -> 1023860 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_trailing_skip_box_video1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video1.m4sbin0 -> 23860 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video1.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video10.m4sbin0 -> 18109 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video10.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video11.m4sbin0 -> 23969 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video11.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video12.m4sbin0 -> 21937 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video12.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video13.m4sbin0 -> 16265 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video13.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video2.m4sbin0 -> 21595 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video2.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video3.m4sbin0 -> 23429 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video3.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video4.m4sbin0 -> 22446 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video4.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video5.m4sbin0 -> 18191 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video5.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video6.m4sbin0 -> 23773 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video6.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video7.m4sbin0 -> 21749 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video7.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video8.m4sbin0 -> 23608 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video8.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video9.m4sbin0 -> 22553 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_video9.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_videoinit.mp4bin0 -> 887 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbop_videoinit.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bipbop/bipbopinit.mp4bin0 -> 1395 bytes
-rw-r--r--dom/media/mediasource/test/bipbop/bipbopinit.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/bug1718709_high_res.mp4bin0 -> 1038283 bytes
-rw-r--r--dom/media/mediasource/test/bug1718709_low_res.mp4bin0 -> 245318 bytes
-rw-r--r--dom/media/mediasource/test/crashtests/1005366.html27
-rw-r--r--dom/media/mediasource/test/crashtests/1059035.html26
-rw-r--r--dom/media/mediasource/test/crashtests/926665.html26
-rw-r--r--dom/media/mediasource/test/crashtests/931388.html17
-rw-r--r--dom/media/mediasource/test/crashtests/crashtests.list4
-rw-r--r--dom/media/mediasource/test/flac/00001.m4sbin0 -> 658125 bytes
-rw-r--r--dom/media/mediasource/test/flac/00001.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/flac/00002.m4sbin0 -> 685567 bytes
-rw-r--r--dom/media/mediasource/test/flac/00002.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/flac/00003.m4sbin0 -> 747868 bytes
-rw-r--r--dom/media/mediasource/test/flac/00003.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/flac/IS.mp4bin0 -> 608 bytes
-rw-r--r--dom/media/mediasource/test/flac/IS.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/init-trackid2.mp4bin0 -> 9108 bytes
-rw-r--r--dom/media/mediasource/test/init-trackid2.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/init-trackid3.mp4bin0 -> 9108 bytes
-rw-r--r--dom/media/mediasource/test/init-trackid3.mp4^headers^1
-rw-r--r--dom/media/mediasource/test/mediasource.js235
-rw-r--r--dom/media/mediasource/test/mochitest.ini213
-rw-r--r--dom/media/mediasource/test/seek.webmbin0 -> 215529 bytes
-rw-r--r--dom/media/mediasource/test/seek.webm^headers^1
-rw-r--r--dom/media/mediasource/test/seek_lowres.webmbin0 -> 100749 bytes
-rw-r--r--dom/media/mediasource/test/seek_lowres.webm^headers^1
-rw-r--r--dom/media/mediasource/test/segment-2.0001.m4sbin0 -> 34778 bytes
-rw-r--r--dom/media/mediasource/test/segment-2.0001.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/segment-2.0002.m4sbin0 -> 34653 bytes
-rw-r--r--dom/media/mediasource/test/segment-2.0002.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/segment-3.0001.m4sbin0 -> 34787 bytes
-rw-r--r--dom/media/mediasource/test/segment-3.0001.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/segment-3.0002.m4sbin0 -> 34640 bytes
-rw-r--r--dom/media/mediasource/test/segment-3.0002.m4s^headers^1
-rw-r--r--dom/media/mediasource/test/tags_before_cluster.webmbin0 -> 111714 bytes
-rw-r--r--dom/media/mediasource/test/tags_before_cluster.webm^headers^1
-rw-r--r--dom/media/mediasource/test/test_AVC3_mp4.html38
-rw-r--r--dom/media/mediasource/test/test_AbortAfterPartialMediaSegment.html62
-rw-r--r--dom/media/mediasource/test/test_AppendPartialInitSegment.html43
-rw-r--r--dom/media/mediasource/test/test_AudioChange_mp4.html49
-rw-r--r--dom/media/mediasource/test/test_AudioChange_mp4_WebAudio.html55
-rw-r--r--dom/media/mediasource/test/test_AutoRevocation.html40
-rw-r--r--dom/media/mediasource/test/test_BufferedSeek.html44
-rw-r--r--dom/media/mediasource/test/test_BufferedSeek_mp4.html43
-rw-r--r--dom/media/mediasource/test/test_BufferingWait.html52
-rw-r--r--dom/media/mediasource/test/test_BufferingWait_mp4.html49
-rw-r--r--dom/media/mediasource/test/test_ChangeType.html84
-rw-r--r--dom/media/mediasource/test/test_ChangeWhileWaitingOnMissingData_mp4.html37
-rw-r--r--dom/media/mediasource/test/test_DifferentStreamStartTimes.html54
-rw-r--r--dom/media/mediasource/test/test_DrainOnMissingData_mp4.html49
-rw-r--r--dom/media/mediasource/test/test_DurationChange.html71
-rw-r--r--dom/media/mediasource/test/test_DurationUpdated.html48
-rw-r--r--dom/media/mediasource/test/test_DurationUpdated_mp4.html47
-rw-r--r--dom/media/mediasource/test/test_EndOfStream.html29
-rw-r--r--dom/media/mediasource/test/test_EndOfStream_mp4.html29
-rw-r--r--dom/media/mediasource/test/test_EndedEvent.html31
-rw-r--r--dom/media/mediasource/test/test_Eviction_mp4.html63
-rw-r--r--dom/media/mediasource/test/test_ExperimentalAsync.html102
-rw-r--r--dom/media/mediasource/test/test_FrameSelection.html64
-rw-r--r--dom/media/mediasource/test/test_FrameSelection_mp4.html49
-rw-r--r--dom/media/mediasource/test/test_HEAAC_extradata.html89
-rw-r--r--dom/media/mediasource/test/test_HaveMetadataUnbufferedSeek.html38
-rw-r--r--dom/media/mediasource/test/test_HaveMetadataUnbufferedSeek_mp4.html42
-rw-r--r--dom/media/mediasource/test/test_InputBufferIsCleared.html58
-rw-r--r--dom/media/mediasource/test/test_LiveSeekable.html84
-rw-r--r--dom/media/mediasource/test/test_LoadedDataFired_mp4.html57
-rw-r--r--dom/media/mediasource/test/test_LoadedMetadataFired.html31
-rw-r--r--dom/media/mediasource/test/test_LoadedMetadataFired_mp4.html31
-rw-r--r--dom/media/mediasource/test/test_MediaSource.html92
-rw-r--r--dom/media/mediasource/test/test_MediaSource_capture_gc.html72
-rw-r--r--dom/media/mediasource/test/test_MediaSource_disabled.html31
-rw-r--r--dom/media/mediasource/test/test_MediaSource_flac_mp4.html33
-rw-r--r--dom/media/mediasource/test/test_MediaSource_memory_reporting.html47
-rw-r--r--dom/media/mediasource/test/test_MediaSource_mp4.html90
-rw-r--r--dom/media/mediasource/test/test_MultipleInitSegments.html49
-rw-r--r--dom/media/mediasource/test/test_MultipleInitSegments_mp4.html44
-rw-r--r--dom/media/mediasource/test/test_NoAudioLoopBackData.html78
-rw-r--r--dom/media/mediasource/test/test_NoAudioLoopBackData_Muted.html79
-rw-r--r--dom/media/mediasource/test/test_NoVideoLoopBackData.html81
-rw-r--r--dom/media/mediasource/test/test_OnEvents.html42
-rw-r--r--dom/media/mediasource/test/test_PlayEvents.html115
-rw-r--r--dom/media/mediasource/test/test_PlayEventsAutoPlaying.html58
-rw-r--r--dom/media/mediasource/test/test_PlayEventsAutoPlaying2.html58
-rw-r--r--dom/media/mediasource/test/test_RemoveSourceBuffer.html52
-rw-r--r--dom/media/mediasource/test/test_Resolution_change_should_not_cause_video_freeze.html49
-rw-r--r--dom/media/mediasource/test/test_ResumeAfterClearing_mp4.html44
-rw-r--r--dom/media/mediasource/test/test_SeekNoData_mp4.html57
-rw-r--r--dom/media/mediasource/test/test_SeekToEnd_mp4.html54
-rw-r--r--dom/media/mediasource/test/test_SeekToLastFrame_mp4.html34
-rw-r--r--dom/media/mediasource/test/test_SeekTwice_mp4.html45
-rw-r--r--dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStream.html54
-rw-r--r--dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStreamSplit.html60
-rw-r--r--dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStreamSplit_mp4.html60
-rw-r--r--dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStream_mp4.html55
-rw-r--r--dom/media/mediasource/test/test_SeekedEvent_mp4.html48
-rw-r--r--dom/media/mediasource/test/test_Sequence_mp4.html37
-rw-r--r--dom/media/mediasource/test/test_SetModeThrows.html34
-rw-r--r--dom/media/mediasource/test/test_SplitAppend.html36
-rw-r--r--dom/media/mediasource/test/test_SplitAppendDelay.html38
-rw-r--r--dom/media/mediasource/test/test_SplitAppendDelay_mp4.html39
-rw-r--r--dom/media/mediasource/test/test_SplitAppend_mp4.html38
-rw-r--r--dom/media/mediasource/test/test_Threshold_mp4.html73
-rw-r--r--dom/media/mediasource/test/test_TimestampOffset_mp4.html76
-rw-r--r--dom/media/mediasource/test/test_TruncatedDuration.html55
-rw-r--r--dom/media/mediasource/test/test_TruncatedDuration_mp4.html59
-rw-r--r--dom/media/mediasource/test/test_WMFUnmatchedAudioDataTime.html32
-rw-r--r--dom/media/mediasource/test/test_WaitingOnMissingData.html60
-rw-r--r--dom/media/mediasource/test/test_WaitingOnMissingDataEnded_mp4.html47
-rw-r--r--dom/media/mediasource/test/test_WaitingOnMissingData_mp4.html61
-rw-r--r--dom/media/mediasource/test/test_WaitingToEndedTransition_mp4.html52
-rw-r--r--dom/media/mediasource/test/test_WebMTagsBeforeCluster.html47
-rw-r--r--dom/media/mediasource/test/test_trackidchange_mp4.html32
-rw-r--r--dom/media/mediasource/test/whitenoise-he-aac-5s.mp4bin0 -> 27078 bytes
-rw-r--r--dom/media/mediasource/test/wmf_mismatchedaudiotime.mp4bin0 -> 48906 bytes
325 files changed, 14200 insertions, 0 deletions
diff --git a/dom/media/mediasource/AsyncEventRunner.h b/dom/media/mediasource/AsyncEventRunner.h
new file mode 100644
index 0000000000..37a7a1b6b3
--- /dev/null
+++ b/dom/media/mediasource/AsyncEventRunner.h
@@ -0,0 +1,32 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_ASYNCEVENTRUNNER_H_
+#define MOZILLA_ASYNCEVENTRUNNER_H_
+
+#include "nsThreadUtils.h"
+
+namespace mozilla {
+
+template <typename T>
+class AsyncEventRunner : public Runnable {
+ public:
+ AsyncEventRunner(T* aTarget, const char* aName)
+ : Runnable("AsyncEventRunner"), mTarget(aTarget), mName(aName) {}
+
+ NS_IMETHOD Run() override {
+ mTarget->DispatchSimpleEvent(mName);
+ return NS_OK;
+ }
+
+ private:
+ RefPtr<T> mTarget;
+ const char* mName;
+};
+
+} // namespace mozilla
+
+#endif /* MOZILLA_ASYNCEVENTRUNNER_H_ */
diff --git a/dom/media/mediasource/ContainerParser.cpp b/dom/media/mediasource/ContainerParser.cpp
new file mode 100644
index 0000000000..b31ee42d67
--- /dev/null
+++ b/dom/media/mediasource/ContainerParser.cpp
@@ -0,0 +1,767 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "ContainerParser.h"
+
+#include "WebMBufferedParser.h"
+#include "mozilla/EndianUtils.h"
+#include "mozilla/IntegerPrintfMacros.h"
+#include "mozilla/ErrorResult.h"
+#include "MoofParser.h"
+#include "mozilla/Logging.h"
+#include "mozilla/Maybe.h"
+#include "mozilla/Result.h"
+#include "MediaData.h"
+#include "nsMimeTypes.h"
+#ifdef MOZ_FMP4
+# include "AtomType.h"
+# include "BufferReader.h"
+# include "ByteStream.h"
+# include "MP4Interval.h"
+# include "SampleIterator.h"
+#endif
+#include "SourceBufferResource.h"
+#include <algorithm>
+
+extern mozilla::LogModule* GetMediaSourceSamplesLog();
+
+#define MSE_DEBUG(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceSamplesLog(), mozilla::LogLevel::Debug, \
+ "(%s)::%s: " arg, mType.OriginalString().Data(), __func__, \
+ ##__VA_ARGS__)
+#define MSE_DEBUGV(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceSamplesLog(), mozilla::LogLevel::Verbose, \
+ "(%s)::%s: " arg, mType.OriginalString().Data(), __func__, \
+ ##__VA_ARGS__)
+#define MSE_DEBUGVEX(_this, arg, ...) \
+ DDMOZ_LOGEX(_this, GetMediaSourceSamplesLog(), mozilla::LogLevel::Verbose, \
+ "(%s)::%s: " arg, mType.OriginalString().Data(), __func__, \
+ ##__VA_ARGS__)
+
+namespace mozilla {
+
+ContainerParser::ContainerParser(const MediaContainerType& aType)
+ : mHasInitData(false), mTotalParsed(0), mGlobalOffset(0), mType(aType) {}
+
+ContainerParser::~ContainerParser() = default;
+
+MediaResult ContainerParser::IsInitSegmentPresent(const MediaSpan& aData) {
+ MSE_DEBUG(
+ "aLength=%zu [%x%x%x%x]", aData.Length(),
+ aData.Length() > 0 ? aData[0] : 0, aData.Length() > 1 ? aData[1] : 0,
+ aData.Length() > 2 ? aData[2] : 0, aData.Length() > 3 ? aData[3] : 0);
+ return NS_ERROR_NOT_AVAILABLE;
+}
+
+MediaResult ContainerParser::IsMediaSegmentPresent(const MediaSpan& aData) {
+ MSE_DEBUG(
+ "aLength=%zu [%x%x%x%x]", aData.Length(),
+ aData.Length() > 0 ? aData[0] : 0, aData.Length() > 1 ? aData[1] : 0,
+ aData.Length() > 2 ? aData[2] : 0, aData.Length() > 3 ? aData[3] : 0);
+ return NS_ERROR_NOT_AVAILABLE;
+}
+
+MediaResult ContainerParser::ParseStartAndEndTimestamps(const MediaSpan& aData,
+ media::TimeUnit& aStart,
+ media::TimeUnit& aEnd) {
+ return NS_ERROR_NOT_AVAILABLE;
+}
+
+bool ContainerParser::TimestampsFuzzyEqual(int64_t aLhs, int64_t aRhs) {
+ return llabs(aLhs - aRhs) <= GetRoundingError();
+}
+
+int64_t ContainerParser::GetRoundingError() {
+ NS_WARNING("Using default ContainerParser::GetRoundingError implementation");
+ return 0;
+}
+
+bool ContainerParser::HasCompleteInitData() {
+ return mHasInitData && !!mInitData->Length();
+}
+
+MediaByteBuffer* ContainerParser::InitData() { return mInitData; }
+
+MediaByteRange ContainerParser::InitSegmentRange() {
+ return mCompleteInitSegmentRange;
+}
+
+MediaByteRange ContainerParser::MediaHeaderRange() {
+ return mCompleteMediaHeaderRange;
+}
+
+MediaByteRange ContainerParser::MediaSegmentRange() {
+ return mCompleteMediaSegmentRange;
+}
+
+DDLoggedTypeDeclNameAndBase(WebMContainerParser, ContainerParser);
+
+class WebMContainerParser
+ : public ContainerParser,
+ public DecoderDoctorLifeLogger<WebMContainerParser> {
+ public:
+ explicit WebMContainerParser(const MediaContainerType& aType)
+ : ContainerParser(aType), mParser(0), mOffset(0) {}
+
+ static const unsigned NS_PER_USEC = 1000;
+
+ MediaResult IsInitSegmentPresent(const MediaSpan& aData) override {
+ ContainerParser::IsInitSegmentPresent(aData);
+ if (aData.Length() < 4) {
+ return NS_ERROR_NOT_AVAILABLE;
+ }
+
+ WebMBufferedParser parser(0);
+ nsTArray<WebMTimeDataOffset> mapping;
+ if (auto result = parser.Append(aData.Elements(), aData.Length(), mapping);
+ NS_FAILED(result)) {
+ return result;
+ }
+ return parser.mInitEndOffset > 0 ? NS_OK : NS_ERROR_NOT_AVAILABLE;
+ }
+
+ MediaResult IsMediaSegmentPresent(const MediaSpan& aData) override {
+ ContainerParser::IsMediaSegmentPresent(aData);
+ if (aData.Length() < 4) {
+ return NS_ERROR_NOT_AVAILABLE;
+ }
+
+ WebMBufferedParser parser(0);
+ nsTArray<WebMTimeDataOffset> mapping;
+ parser.AppendMediaSegmentOnly();
+ if (auto result = parser.Append(aData.Elements(), aData.Length(), mapping);
+ NS_FAILED(result)) {
+ return result;
+ }
+ return parser.GetClusterOffset() >= 0 ? NS_OK : NS_ERROR_NOT_AVAILABLE;
+ }
+
+ MediaResult ParseStartAndEndTimestamps(const MediaSpan& aData,
+ media::TimeUnit& aStart,
+ media::TimeUnit& aEnd) override {
+ bool initSegment = NS_SUCCEEDED(IsInitSegmentPresent(aData));
+
+ if (mLastMapping &&
+ (initSegment || NS_SUCCEEDED(IsMediaSegmentPresent(aData)))) {
+ // The last data contained a complete cluster but we can only detect it
+ // now that a new one is starting.
+ // We use mOffset as end position to ensure that any blocks not reported
+ // by WebMBufferParser are properly skipped.
+ mCompleteMediaSegmentRange =
+ MediaByteRange(mLastMapping.ref().mSyncOffset, mOffset) +
+ mGlobalOffset;
+ mLastMapping.reset();
+ MSE_DEBUG("New cluster found at start, ending previous one");
+ return NS_ERROR_NOT_AVAILABLE;
+ }
+
+ if (initSegment) {
+ mOffset = 0;
+ mParser = WebMBufferedParser(0);
+ mOverlappedMapping.Clear();
+ mInitData = new MediaByteBuffer();
+ mResource = new SourceBufferResource();
+ DDLINKCHILD("resource", mResource.get());
+ mCompleteInitSegmentRange = MediaByteRange();
+ mCompleteMediaHeaderRange = MediaByteRange();
+ mCompleteMediaSegmentRange = MediaByteRange();
+ mGlobalOffset = mTotalParsed;
+ }
+
+ // XXX if it only adds new mappings, overlapped but not available
+ // (e.g. overlap < 0) frames are "lost" from the reported mappings here.
+ nsTArray<WebMTimeDataOffset> mapping;
+ mapping.AppendElements(mOverlappedMapping);
+ mOverlappedMapping.Clear();
+ if (auto result = mParser.Append(aData.Elements(), aData.Length(), mapping);
+ NS_FAILED(result)) {
+ return result;
+ }
+ if (mResource) {
+ mResource->AppendData(aData);
+ }
+
+ // XXX This is a bit of a hack. Assume if there are no timecodes
+ // present and it's an init segment that it's _just_ an init segment.
+ // We should be more precise.
+ if (initSegment || !HasCompleteInitData()) {
+ if (mParser.mInitEndOffset > 0) {
+ MOZ_DIAGNOSTIC_ASSERT(mInitData && mResource &&
+ mParser.mInitEndOffset <= mResource->GetLength());
+ if (!mInitData->SetLength(mParser.mInitEndOffset, fallible)) {
+ // Super unlikely OOM
+ return NS_ERROR_OUT_OF_MEMORY;
+ }
+ mCompleteInitSegmentRange =
+ MediaByteRange(0, mParser.mInitEndOffset) + mGlobalOffset;
+ char* buffer = reinterpret_cast<char*>(mInitData->Elements());
+ mResource->ReadFromCache(buffer, 0, mParser.mInitEndOffset);
+ MSE_DEBUG("Stashed init of %" PRId64 " bytes.", mParser.mInitEndOffset);
+ mResource = nullptr;
+ } else {
+ MSE_DEBUG("Incomplete init found.");
+ }
+ mHasInitData = true;
+ }
+ mOffset += aData.Length();
+ mTotalParsed += aData.Length();
+
+ if (mapping.IsEmpty()) {
+ return NS_ERROR_NOT_AVAILABLE;
+ }
+
+ // Calculate media range for first media segment.
+
+ // Check if we have a cluster finishing in the current data.
+ uint32_t endIdx = mapping.Length() - 1;
+ bool foundNewCluster = false;
+ while (mapping[0].mSyncOffset != mapping[endIdx].mSyncOffset) {
+ endIdx -= 1;
+ foundNewCluster = true;
+ }
+
+ int32_t completeIdx = endIdx;
+ while (completeIdx >= 0 && mOffset < mapping[completeIdx].mEndOffset) {
+ MSE_DEBUG("block is incomplete, missing: %" PRId64,
+ mapping[completeIdx].mEndOffset - mOffset);
+ completeIdx -= 1;
+ }
+
+ // Save parsed blocks for which we do not have all data yet.
+ mOverlappedMapping.AppendElements(mapping.Elements() + completeIdx + 1,
+ mapping.Length() - completeIdx - 1);
+
+ if (completeIdx < 0) {
+ mLastMapping.reset();
+ return NS_ERROR_NOT_AVAILABLE;
+ }
+
+ if (mCompleteMediaHeaderRange.IsEmpty()) {
+ mCompleteMediaHeaderRange =
+ MediaByteRange(mapping[0].mSyncOffset, mapping[0].mEndOffset) +
+ mGlobalOffset;
+ }
+
+ if (foundNewCluster && mOffset >= mapping[endIdx].mEndOffset) {
+ // We now have all information required to delimit a complete cluster.
+ int64_t endOffset = mapping[endIdx + 1].mSyncOffset;
+ if (mapping[endIdx + 1].mInitOffset > mapping[endIdx].mInitOffset) {
+ // We have a new init segment before this cluster.
+ endOffset = mapping[endIdx + 1].mInitOffset;
+ }
+ mCompleteMediaSegmentRange =
+ MediaByteRange(mapping[endIdx].mSyncOffset, endOffset) +
+ mGlobalOffset;
+ } else if (mapping[endIdx].mClusterEndOffset >= 0 &&
+ mOffset >= mapping[endIdx].mClusterEndOffset) {
+ mCompleteMediaSegmentRange =
+ MediaByteRange(
+ mapping[endIdx].mSyncOffset,
+ mParser.EndSegmentOffset(mapping[endIdx].mClusterEndOffset)) +
+ mGlobalOffset;
+ }
+
+ Maybe<WebMTimeDataOffset> previousMapping;
+ if (completeIdx) {
+ previousMapping = Some(mapping[completeIdx - 1]);
+ } else {
+ previousMapping = mLastMapping;
+ }
+
+ mLastMapping = Some(mapping[completeIdx]);
+
+ if (!previousMapping && completeIdx + 1u >= mapping.Length()) {
+ // We have no previous nor next block available,
+ // so we can't estimate this block's duration.
+ return NS_ERROR_NOT_AVAILABLE;
+ }
+
+ uint64_t frameDuration =
+ (completeIdx + 1u < mapping.Length())
+ ? mapping[completeIdx + 1].mTimecode -
+ mapping[completeIdx].mTimecode
+ : mapping[completeIdx].mTimecode - previousMapping.ref().mTimecode;
+ aStart = media::TimeUnit::FromNanoseconds(
+ AssertedCast<int64_t>(mapping[0].mTimecode));
+ aEnd = media::TimeUnit::FromNanoseconds(
+ AssertedCast<int64_t>(mapping[completeIdx].mTimecode + frameDuration));
+
+ MSE_DEBUG("[%" PRId64 ", %" PRId64 "] [fso=%" PRId64 ", leo=%" PRId64
+ ", l=%zu processedIdx=%u fs=%" PRId64 "]",
+ aStart.ToMicroseconds(), aEnd.ToMicroseconds(),
+ mapping[0].mSyncOffset, mapping[completeIdx].mEndOffset,
+ mapping.Length(), completeIdx, mCompleteMediaSegmentRange.mEnd);
+
+ return NS_OK;
+ }
+
+ int64_t GetRoundingError() override {
+ int64_t error = mParser.GetTimecodeScale() / NS_PER_USEC;
+ return error * 2;
+ }
+
+ private:
+ WebMBufferedParser mParser;
+ nsTArray<WebMTimeDataOffset> mOverlappedMapping;
+ int64_t mOffset;
+ Maybe<WebMTimeDataOffset> mLastMapping;
+};
+
+#ifdef MOZ_FMP4
+
+DDLoggedTypeDeclNameAndBase(MP4Stream, ByteStream);
+
// Thin ByteStream adapter that lets MoofParser read from a
// SourceBufferResource (the in-memory cache of appended MSE data).
class MP4Stream : public ByteStream, public DecoderDoctorLifeLogger<MP4Stream> {
 public:
  explicit MP4Stream(SourceBufferResource* aResource);
  virtual ~MP4Stream();
  // All data is held in memory by the resource, so ReadAt simply forwards
  // to CachedReadAt (see definitions below).
  bool ReadAt(int64_t aOffset, void* aBuffer, size_t aCount,
              size_t* aBytesRead) override;
  bool CachedReadAt(int64_t aOffset, void* aBuffer, size_t aCount,
                    size_t* aBytesRead) override;
  bool Length(int64_t* aSize) override;
  const uint8_t* GetContiguousAccess(int64_t aOffset, size_t aSize) override;

 private:
  RefPtr<SourceBufferResource> mResource;
};
+
// Takes a strong reference to the resource; aResource must be non-null.
MP4Stream::MP4Stream(SourceBufferResource* aResource) : mResource(aResource) {
  MOZ_COUNT_CTOR(MP4Stream);
  MOZ_ASSERT(aResource);
  DDLINKCHILD("resource", aResource);
}
+
// Balances MOZ_COUNT_CTOR in the constructor for leak accounting.
MP4Stream::~MP4Stream() { MOZ_COUNT_DTOR(MP4Stream); }
+
bool MP4Stream::ReadAt(int64_t aOffset, void* aBuffer, size_t aCount,
                       size_t* aBytesRead) {
  // The backing SourceBufferResource keeps all appended bytes in memory,
  // so a "live" read is identical to a cached read.
  return CachedReadAt(aOffset, aBuffer, aCount, aBytesRead);
}
+
+bool MP4Stream::CachedReadAt(int64_t aOffset, void* aBuffer, size_t aCount,
+ size_t* aBytesRead) {
+ nsresult rv = mResource->ReadFromCache(reinterpret_cast<char*>(aBuffer),
+ aOffset, aCount);
+ if (NS_FAILED(rv)) {
+ *aBytesRead = 0;
+ return false;
+ }
+ *aBytesRead = aCount;
+ return true;
+}
+
// Returns a direct pointer into the cached data for [aOffset, aOffset+aSize)
// when available; presumably nullptr otherwise (see SourceBufferResource) —
// callers should fall back to ReadAt in that case.
const uint8_t* MP4Stream::GetContiguousAccess(int64_t aOffset, size_t aSize) {
  return mResource->GetContiguousAccess(aOffset, aSize);
}
+
+bool MP4Stream::Length(int64_t* aSize) {
+ if (mResource->GetLength() < 0) return false;
+ *aSize = mResource->GetLength();
+ return true;
+}
+
+DDLoggedTypeDeclNameAndBase(MP4ContainerParser, ContainerParser);
+
// ContainerParser for ISO BMFF (fragmented MP4) byte streams. Top-level box
// scanning is done by the nested AtomParser; timestamp extraction is
// delegated to MoofParser reading through an in-memory MP4Stream.
class MP4ContainerParser : public ContainerParser,
                           public DecoderDoctorLifeLogger<MP4ContainerParser> {
 public:
  explicit MP4ContainerParser(const MediaContainerType& aType)
      : ContainerParser(aType) {}

  MediaResult IsInitSegmentPresent(const MediaSpan& aData) override {
    ContainerParser::IsInitSegmentPresent(aData);
    // Each MP4 atom has a chunk size and chunk type. The root chunk in an MP4
    // file is the 'ftyp' atom followed by a file type. We just check for a
    // vaguely valid 'ftyp' atom.
    if (aData.Length() < 8) {
      // Too short to hold even a single box header.
      return NS_ERROR_NOT_AVAILABLE;
    }
    AtomParser parser(*this, aData, AtomParser::StopAt::eInitSegment);
    if (!parser.IsValid()) {
      return MediaResult(
          NS_ERROR_FAILURE,
          RESULT_DETAIL("Invalid Top-Level Box:%s", parser.LastInvalidBox()));
    }
    return parser.StartWithInitSegment() ? NS_OK : NS_ERROR_NOT_AVAILABLE;
  }

  MediaResult IsMediaSegmentPresent(const MediaSpan& aData) override {
    if (aData.Length() < 8) {
      return NS_ERROR_NOT_AVAILABLE;
    }
    AtomParser parser(*this, aData, AtomParser::StopAt::eMediaSegment);
    if (!parser.IsValid()) {
      return MediaResult(
          NS_ERROR_FAILURE,
          RESULT_DETAIL("Invalid Box:%s", parser.LastInvalidBox()));
    }
    return parser.StartWithMediaSegment() ? NS_OK : NS_ERROR_NOT_AVAILABLE;
  }

 private:
  // Scans a buffer of top-level ISO BMFF boxes, recording the offsets of the
  // first 'moov' (init), 'moof' (media) and 'mdat' boxes encountered, and
  // rejecting any box type that is not a known valid top-level type.
  class AtomParser {
   public:
    enum class StopAt { eInitSegment, eMediaSegment, eEnd };

    AtomParser(const MP4ContainerParser& aParser, const MediaSpan& aData,
               StopAt aStop = StopAt::eEnd) {
      mValid = Init(aParser, aData, aStop).isOk();
    }

    Result<Ok, nsresult> Init(const MP4ContainerParser& aParser,
                              const MediaSpan& aData, StopAt aStop) {
      const MediaContainerType mType(
          aParser.ContainerType());  // for logging macro.
      BufferReader reader(aData);
      AtomType initAtom("moov");
      AtomType mediaAtom("moof");
      AtomType dataAtom("mdat");

      // Valid top-level boxes defined in ISO/IEC 14496-12 (Table 1)
      static const AtomType validBoxes[] = {
          "ftyp", "moov",          // init segment
          "pdin", "free", "sidx",  // optional prior moov box
          "styp", "moof", "mdat",  // media segment
          "mfra", "skip", "meta", "meco", "ssix", "prft",  // others.
          "pssh",  // optional with encrypted EME, though ignored.
          "emsg",  // ISO23009-1:2014 Section 5.10.3.3
          "bloc", "uuid"  // boxes accepted by chrome.
      };

      while (reader.Remaining() >= 8) {
        // Box header: 32-bit size followed by a 4CC type.
        uint32_t tmp;
        MOZ_TRY_VAR(tmp, reader.ReadU32());
        uint64_t size = tmp;
        const uint8_t* typec = reader.Peek(4);
        MOZ_TRY_VAR(tmp, reader.ReadU32());
        AtomType type(tmp);
        MSE_DEBUGVEX(&aParser, "Checking atom:'%c%c%c%c' @ %u", typec[0],
                     typec[1], typec[2], typec[3],
                     (uint32_t)reader.Offset() - 8);
        if (std::find(std::begin(validBoxes), std::end(validBoxes), type) ==
            std::end(validBoxes)) {
          // No valid box found, no point continuing.
          mLastInvalidBox[0] = typec[0];
          mLastInvalidBox[1] = typec[1];
          mLastInvalidBox[2] = typec[2];
          mLastInvalidBox[3] = typec[3];
          mLastInvalidBox[4] = '\0';
          return Err(NS_ERROR_FAILURE);
        }
        // Only the first occurrence of each box of interest is recorded.
        if (mInitOffset.isNothing() && AtomType(type) == initAtom) {
          mInitOffset = Some(reader.Offset());
        }
        if (mMediaOffset.isNothing() && AtomType(type) == mediaAtom) {
          mMediaOffset = Some(reader.Offset());
        }
        if (mDataOffset.isNothing() && AtomType(type) == dataAtom) {
          mDataOffset = Some(reader.Offset());
        }
        if (size == 1) {
          // 64 bits size.
          MOZ_TRY_VAR(size, reader.ReadU64());
        } else if (size == 0) {
          // Atom extends to the end of the buffer, it can't have what we're
          // looking for.
          break;
        }
        // NOTE(review): a declared size of 2..7 would make size - 8 wrap to a
        // huge uint64_t; that falls into the "incomplete atom" break below,
        // which looks intentional but is worth confirming.
        if (reader.Remaining() < size - 8) {
          // Incomplete atom.
          break;
        }
        reader.Read(size - 8);

        if (aStop == StopAt::eInitSegment && (mInitOffset || mMediaOffset)) {
          // When we're looking for an init segment, if we encountered a media
          // segment, it will need to be processed first. So we can stop
          // right away if we have found a media segment.
          break;
        }
        if (aStop == StopAt::eMediaSegment &&
            (mInitOffset || (mMediaOffset && mDataOffset))) {
          // When we're looking for a media segment, if we encountered an init
          // segment, it will need to be processed first. So we can stop
          // right away if we have found an init segment.
          break;
        }
      }

      return Ok();
    }

    // True when a 'moov' box was seen before any 'moof'.
    bool StartWithInitSegment() const {
      return mInitOffset.isSome() && (mMediaOffset.isNothing() ||
                                      mInitOffset.ref() < mMediaOffset.ref());
    }
    // True when a 'moof' box was seen before any 'moov'.
    bool StartWithMediaSegment() const {
      return mMediaOffset.isSome() && (mInitOffset.isNothing() ||
                                       mMediaOffset.ref() < mInitOffset.ref());
    }
    bool IsValid() const { return mValid; }
    // 4CC of the last rejected box, for error reporting.
    const char* LastInvalidBox() const { return mLastInvalidBox; }

   private:
    Maybe<size_t> mInitOffset;
    Maybe<size_t> mMediaOffset;
    Maybe<size_t> mDataOffset;
    bool mValid;
    char mLastInvalidBox[5];
  };

 public:
  MediaResult ParseStartAndEndTimestamps(const MediaSpan& aData,
                                         media::TimeUnit& aStart,
                                         media::TimeUnit& aEnd) override {
    bool initSegment = NS_SUCCEEDED(IsInitSegmentPresent(aData));
    if (initSegment) {
      // A new init segment resets all parsing state.
      mResource = new SourceBufferResource();
      DDLINKCHILD("resource", mResource.get());
      mStream = new MP4Stream(mResource);
      // We use a timestampOffset of 0 for ContainerParser, and require
      // consumers of ParseStartAndEndTimestamps to add their timestamp offset
      // manually. This allows the ContainerParser to be shared across different
      // timestampOffsets.
      mParser = MakeUnique<MoofParser>(mStream, AsVariant(ParseAllTracks{}),
                                       /* aIsAudio = */ false);
      DDLINKCHILD("parser", mParser.get());
      mInitData = new MediaByteBuffer();
      mCompleteInitSegmentRange = MediaByteRange();
      mCompleteMediaHeaderRange = MediaByteRange();
      mCompleteMediaSegmentRange = MediaByteRange();
      mGlobalOffset = mTotalParsed;
    } else if (!mStream || !mParser) {
      // Media data arriving before any init segment: account for the bytes
      // but there is nothing we can parse yet.
      mTotalParsed += aData.Length();
      return NS_ERROR_NOT_AVAILABLE;
    }

    MOZ_DIAGNOSTIC_ASSERT(mResource && mParser && mInitData,
                          "Should have received an init segment first");

    mResource->AppendData(aData);
    // Only (re)index the bytes appended since the parser's last position.
    MediaByteRangeSet byteRanges;
    byteRanges +=
        MediaByteRange(int64_t(mParser->mOffset), mResource->GetLength());
    mParser->RebuildFragmentedIndex(byteRanges);

    if (initSegment || !HasCompleteInitData()) {
      MediaByteRange& range = mParser->mInitRange;
      if (range.Length()) {
        // Stash a copy of the complete init segment for later retrieval via
        // InitData().
        mCompleteInitSegmentRange = range + mGlobalOffset;
        if (!mInitData->SetLength(range.Length(), fallible)) {
          // Super unlikely OOM
          return NS_ERROR_OUT_OF_MEMORY;
        }
        char* buffer = reinterpret_cast<char*>(mInitData->Elements());
        mResource->ReadFromCache(buffer, range.mStart, range.Length());
        MSE_DEBUG("Stashed init of %" PRIu64 " bytes.", range.Length());
      } else {
        MSE_DEBUG("Incomplete init found.");
      }
      mHasInitData = true;
    }
    mTotalParsed += aData.Length();

    MP4Interval<media::TimeUnit> compositionRange =
        mParser->GetCompositionRange(byteRanges);

    // Translate ranges into the global (across-appends) offset space.
    mCompleteMediaHeaderRange =
        mParser->FirstCompleteMediaHeader() + mGlobalOffset;
    mCompleteMediaSegmentRange =
        mParser->FirstCompleteMediaSegment() + mGlobalOffset;

    if (HasCompleteInitData()) {
      // Evict data the parser has already processed (up to mParser->mOffset).
      mResource->EvictData(mParser->mOffset, mParser->mOffset);
    }

    if (compositionRange.IsNull()) {
      return NS_ERROR_NOT_AVAILABLE;
    }
    aStart = compositionRange.start;
    aEnd = compositionRange.end;
    MSE_DEBUG("[%" PRId64 ", %" PRId64 "]", aStart.ToMicroseconds(),
              aEnd.ToMicroseconds());
    return NS_OK;
  }

  // Gaps of up to 35ms (marginally longer than a single frame at 30fps) are
  // considered to be sequential frames.
  int64_t GetRoundingError() override { return 35000; }

 private:
  RefPtr<MP4Stream> mStream;
  UniquePtr<MoofParser> mParser;
};
+#endif // MOZ_FMP4
+
+#ifdef MOZ_FMP4
+DDLoggedTypeDeclNameAndBase(ADTSContainerParser, ContainerParser);
+
// ContainerParser for raw AAC-in-ADTS byte streams. An "init segment" here is
// just the 7- or 9-byte ADTS packet header; there is no separate container
// structure to parse.
class ADTSContainerParser
    : public ContainerParser,
      public DecoderDoctorLifeLogger<ADTSContainerParser> {
 public:
  explicit ADTSContainerParser(const MediaContainerType& aType)
      : ContainerParser(aType) {}

  // Fields extracted from a raw ADTS packet header.
  typedef struct {
    size_t header_length;  // Length of just the initialization data.
    size_t frame_length;   // Includes header_length.
    uint8_t aac_frames;    // Number of AAC frames in the ADTS frame.
    bool have_crc;
  } Header;

  /// Helper to parse the ADTS header, returning data we care about.
  /// Returns true if the header is parsed successfully.
  /// Returns false if the header is invalid or incomplete,
  /// without modifying the passed-in Header object.
  bool Parse(const MediaSpan& aData, Header& header) {
    // ADTS initialization segments are just the packet header.
    if (aData.Length() < 7) {
      MSE_DEBUG("buffer too short for header.");
      return false;
    }
    // Check 0xfffx sync word plus layer 0.
    if ((aData[0] != 0xff) || ((aData[1] & 0xf6) != 0xf0)) {
      MSE_DEBUG("no syncword.");
      return false;
    }
    // protection_absent == 0 means a 2-byte CRC follows the 7-byte header.
    bool have_crc = !(aData[1] & 0x01);
    if (have_crc && aData.Length() < 9) {
      MSE_DEBUG("buffer too short for header with crc.");
      return false;
    }
    uint8_t frequency_index = (aData[2] & 0x3c) >> 2;
    MOZ_ASSERT(frequency_index < 16);
    if (frequency_index == 15) {
      // Index 15 is the escape value for an explicit frequency, which is not
      // allowed in ADTS.
      MSE_DEBUG("explicit frequency disallowed.");
      return false;
    }
    size_t header_length = have_crc ? 9 : 7;
    // 13-bit frame length field spanning bytes 3-5.
    // NOTE(review): per ISO/IEC 13818-7 the ADTS frame_length field already
    // includes the header bytes, so adding header_length again below may
    // over-count — confirm against callers' expectations.
    size_t data_length = ((aData[3] & 0x03) << 11) | ((aData[4] & 0xff) << 3) |
                         ((aData[5] & 0xe0) >> 5);
    uint8_t frames = (aData[6] & 0x03) + 1;
    MOZ_ASSERT(frames > 0);
    MOZ_ASSERT(frames < 4);

    // Return successfully parsed data.
    header.header_length = header_length;
    header.frame_length = header_length + data_length;
    header.aac_frames = frames;
    header.have_crc = have_crc;
    return true;
  }

  MediaResult IsInitSegmentPresent(const MediaSpan& aData) override {
    // Call superclass for logging.
    ContainerParser::IsInitSegmentPresent(aData);

    Header header;
    if (!Parse(aData, header)) {
      return NS_ERROR_NOT_AVAILABLE;
    }

    MSE_DEBUGV("%llu byte frame %d aac frames%s",
               (unsigned long long)header.frame_length, (int)header.aac_frames,
               header.have_crc ? " crc" : "");

    return NS_OK;
  }

  MediaResult IsMediaSegmentPresent(const MediaSpan& aData) override {
    // Call superclass for logging.
    ContainerParser::IsMediaSegmentPresent(aData);

    // Make sure we have a header so we know how long the frame is.
    // NB this assumes the media segment buffer starts with an
    // initialization segment. Since every frame has an ADTS header
    // this is a normal place to divide packets, but we can re-parse
    // mInitData if we need to handle separate media segments.
    Header header;
    if (!Parse(aData, header)) {
      return NS_ERROR_NOT_AVAILABLE;
    }
    // We're supposed to return true as long as aData contains the
    // start of a media segment, whether or not it's complete. So
    // return true if we have any data beyond the header.
    if (aData.Length() <= header.header_length) {
      return NS_ERROR_NOT_AVAILABLE;
    }

    // We should have at least a partial frame.
    return NS_OK;
  }

  MediaResult ParseStartAndEndTimestamps(const MediaSpan& aData,
                                         media::TimeUnit& aStart,
                                         media::TimeUnit& aEnd) override {
    // ADTS header.
    Header header;
    if (!Parse(aData, header)) {
      return NS_ERROR_NOT_AVAILABLE;
    }
    mHasInitData = true;
    mCompleteInitSegmentRange =
        MediaByteRange(0, int64_t(header.header_length));

    // Cache raw header in case the caller wants a copy.
    mInitData = new MediaByteBuffer(header.header_length);
    mInitData->AppendElements(aData.Elements(), header.header_length);

    // Check that we have enough data for the frame body.
    if (aData.Length() < header.frame_length) {
      MSE_DEBUGV(
          "Not enough data for %llu byte frame"
          " in %llu byte buffer.",
          (unsigned long long)header.frame_length,
          (unsigned long long)(aData.Length()));
      return NS_ERROR_NOT_AVAILABLE;
    }
    mCompleteMediaSegmentRange =
        MediaByteRange(header.header_length, header.frame_length);
    // The ADTS MediaSource Byte Stream Format document doesn't
    // define media header. Just treat it the same as the whole
    // media segment.
    mCompleteMediaHeaderRange = mCompleteMediaSegmentRange;

    MSE_DEBUG("[%" PRId64 ", %" PRId64 "]", aStart.ToMicroseconds(),
              aEnd.ToMicroseconds());
    // We don't update timestamps, regardless.
    return NS_ERROR_NOT_AVAILABLE;
  }

  // Audio shouldn't have gaps.
  // Especially when we generate the timestamps ourselves.
  int64_t GetRoundingError() override { return 0; }
};
+#endif // MOZ_FMP4
+
+/*static*/
+UniquePtr<ContainerParser> ContainerParser::CreateForMIMEType(
+ const MediaContainerType& aType) {
+ if (aType.Type() == MEDIAMIMETYPE(VIDEO_WEBM) ||
+ aType.Type() == MEDIAMIMETYPE(AUDIO_WEBM)) {
+ return MakeUnique<WebMContainerParser>(aType);
+ }
+
+#ifdef MOZ_FMP4
+ if (aType.Type() == MEDIAMIMETYPE(VIDEO_MP4) ||
+ aType.Type() == MEDIAMIMETYPE(AUDIO_MP4)) {
+ return MakeUnique<MP4ContainerParser>(aType);
+ }
+ if (aType.Type() == MEDIAMIMETYPE("audio/aac")) {
+ return MakeUnique<ADTSContainerParser>(aType);
+ }
+#endif
+
+ return MakeUnique<ContainerParser>(aType);
+}
+
+#undef MSE_DEBUG
+#undef MSE_DEBUGV
+#undef MSE_DEBUGVEX
+
+} // namespace mozilla
diff --git a/dom/media/mediasource/ContainerParser.h b/dom/media/mediasource/ContainerParser.h
new file mode 100644
index 0000000000..baac33f545
--- /dev/null
+++ b/dom/media/mediasource/ContainerParser.h
@@ -0,0 +1,97 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_CONTAINERPARSER_H_
+#define MOZILLA_CONTAINERPARSER_H_
+
+#include "mozilla/RefPtr.h"
+#include "mozilla/UniquePtr.h"
+#include "MediaSpan.h"
+#include "MediaContainerType.h"
+#include "MediaResource.h"
+#include "MediaResult.h"
+
+namespace mozilla {
+
+class MediaByteBuffer;
+class SourceBufferResource;
+
+DDLoggedTypeDeclName(ContainerParser);
+
// Format-specific parser used by SourceBuffer to locate init/media segment
// boundaries and frame timestamps in appended byte streams. Subclasses exist
// for WebM, ISO BMFF (MP4) and ADTS; the base class is the fallback returned
// by CreateForMIMEType for unrecognized types.
class ContainerParser : public DecoderDoctorLifeLogger<ContainerParser> {
 public:
  explicit ContainerParser(const MediaContainerType& aType);
  virtual ~ContainerParser();

  // Check whether aData starts with an initialization segment.
  // The base implementation exists only for debug logging and is expected
  // to be called first from the overriding implementation.
  // Return NS_OK if a segment is present, NS_ERROR_NOT_AVAILABLE if
  // insufficient data is currently available to make a determination. Any
  // other value indicates an error.
  virtual MediaResult IsInitSegmentPresent(const MediaSpan& aData);

  // Check whether aData starts with a media segment.
  // The base implementation exists only for debug logging and is expected
  // to be called first from the overriding implementation.
  // Return NS_OK if a segment is present, NS_ERROR_NOT_AVAILABLE if
  // insufficient data is currently available to make a determination. Any
  // other value indicates an error.
  virtual MediaResult IsMediaSegmentPresent(const MediaSpan& aData);

  // Parse aData to extract the start and end frame times from the media
  // segment. aData may not start on a parser sync boundary. Return NS_OK
  // if aStart and aEnd have been updated, and NS_ERROR_NOT_AVAILABLE
  // otherwise when no error was encountered.
  virtual MediaResult ParseStartAndEndTimestamps(const MediaSpan& aData,
                                                 media::TimeUnit& aStart,
                                                 media::TimeUnit& aEnd);

  // Compare aLhs and rHs, considering any error that may exist in the
  // timestamps from the format's base representation. Return true if aLhs
  // == aRhs within the error epsilon.
  bool TimestampsFuzzyEqual(int64_t aLhs, int64_t aRhs);

  // Maximum timestamp comparison slack, in microseconds (format-specific).
  virtual int64_t GetRoundingError();

  // Copy of the most recent init segment data (valid once HasInitData()).
  MediaByteBuffer* InitData();

  bool HasInitData() { return mHasInitData; }

  // Return true if a complete initialization segment has been passed
  // to ParseStartAndEndTimestamps(). The calls below to retrieve
  // MediaByteRanges will be valid from when this call first succeeds.
  bool HasCompleteInitData();
  // Returns the byte range of the first complete init segment, or an empty
  // range if not complete.
  MediaByteRange InitSegmentRange();
  // Returns the byte range of the first complete media segment header,
  // or an empty range if not complete.
  MediaByteRange MediaHeaderRange();
  // Returns the byte range of the first complete media segment or an empty
  // range if not complete.
  MediaByteRange MediaSegmentRange();

  // Factory: select the parser subclass matching aType's MIME type.
  static UniquePtr<ContainerParser> CreateForMIMEType(
      const MediaContainerType& aType);

  const MediaContainerType& ContainerType() const { return mType; }

 protected:
  // Stashed copy of the last complete init segment (see InitData()).
  RefPtr<MediaByteBuffer> mInitData;
  // In-memory cache of appended bytes, used by subclasses that need random
  // access while parsing.
  RefPtr<SourceBufferResource> mResource;
  bool mHasInitData;
  // Total number of bytes ever passed to ParseStartAndEndTimestamps().
  uint64_t mTotalParsed;
  // Offset of the current parse session within the overall appended stream;
  // the byte ranges below are expressed relative to the whole stream.
  uint64_t mGlobalOffset;
  MediaByteRange mCompleteInitSegmentRange;
  MediaByteRange mCompleteMediaHeaderRange;
  MediaByteRange mCompleteMediaSegmentRange;
  const MediaContainerType mType;
};
+
+} // namespace mozilla
+
+#endif /* MOZILLA_CONTAINERPARSER_H_ */
diff --git a/dom/media/mediasource/MediaSource.cpp b/dom/media/mediasource/MediaSource.cpp
new file mode 100644
index 0000000000..e38f0fdcb5
--- /dev/null
+++ b/dom/media/mediasource/MediaSource.cpp
@@ -0,0 +1,698 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaSource.h"
+
+#include "AsyncEventRunner.h"
+#include "Benchmark.h"
+#include "DecoderDoctorDiagnostics.h"
+#include "DecoderTraits.h"
+#include "MediaContainerType.h"
+#include "MediaResult.h"
+#include "MediaSourceDemuxer.h"
+#include "MediaSourceUtils.h"
+#include "SourceBuffer.h"
+#include "SourceBufferList.h"
+#include "mozilla/ErrorResult.h"
+#include "mozilla/FloatingPoint.h"
+#include "mozilla/Logging.h"
+#include "mozilla/Sprintf.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/dom/BindingDeclarations.h"
+#include "mozilla/dom/HTMLMediaElement.h"
+#include "mozilla/gfx/gfxVars.h"
+#include "mozilla/mozalloc.h"
+#include "nsDebug.h"
+#include "nsError.h"
+#include "nsIRunnable.h"
+#include "nsIScriptObjectPrincipal.h"
+#include "nsMimeTypes.h"
+#include "nsPIDOMWindow.h"
+#include "nsServiceManagerUtils.h"
+#include "nsString.h"
+#include "nsThreadUtils.h"
+
+#ifdef MOZ_WIDGET_ANDROID
+# include "AndroidBridge.h"
+# include "mozilla/java/HardwareCodecCapabilityUtilsWrappers.h"
+#endif
+
+struct JSContext;
+class JSObject;
+
// Log module used by MSE classes for internal debug output (MSE_DEBUG).
mozilla::LogModule* GetMediaSourceLog() {
  static mozilla::LazyLogModule sLogModule("MediaSource");
  return sLogModule;
}
+
// Log module for JS-visible API entry points (MSE_API). It shares the
// "MediaSource" module name with GetMediaSourceLog() — presumably intentional
// so one logging pref covers both.
mozilla::LogModule* GetMediaSourceAPILog() {
  static mozilla::LazyLogModule sLogModule("MediaSource");
  return sLogModule;
}
+
+#define MSE_DEBUG(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceLog(), mozilla::LogLevel::Debug, "::%s: " arg, \
+ __func__, ##__VA_ARGS__)
+#define MSE_API(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceAPILog(), mozilla::LogLevel::Debug, "::%s: " arg, \
+ __func__, ##__VA_ARGS__)
+
+// Arbitrary limit.
+static const unsigned int MAX_SOURCE_BUFFERS = 16;
+
+namespace mozilla {
+
+// Returns true if we should enable MSE webm regardless of preferences.
+// 1. If MP4/H264 isn't supported:
+// * Windows XP
+// * Windows Vista and Server 2008 without the optional "Platform Update
+// Supplement"
+// * N/KN editions (Europe and Korea) of Windows 7/8/8.1/10 without the
+// optional "Windows Media Feature Pack"
+// 2. If H264 hardware acceleration is not available.
+// 3. The CPU is considered to be fast enough
+static bool IsVP9Forced(DecoderDoctorDiagnostics* aDiagnostics) {
+ bool mp4supported = DecoderTraits::IsMP4SupportedType(
+ MediaContainerType(MEDIAMIMETYPE(VIDEO_MP4)), aDiagnostics);
+ bool hwsupported = gfx::gfxVars::CanUseHardwareVideoDecoding();
+#ifdef MOZ_WIDGET_ANDROID
+ return !mp4supported || !hwsupported || VP9Benchmark::IsVP9DecodeFast() ||
+ java::HardwareCodecCapabilityUtils::HasHWVP9(false /* aIsEncoder */);
+#else
+ return !mp4supported || !hwsupported || VP9Benchmark::IsVP9DecodeFast();
+#endif
+}
+
+namespace dom {
+
// Records the container MIME type of an addSourceBuffer() call in telemetry.
// Unparseable or unrecognized types are simply not recorded.
// Note: aWindow is currently unused.
static void RecordTypeForTelemetry(const nsAString& aType,
                                   nsPIDOMWindowInner* aWindow) {
  Maybe<MediaContainerType> containerType = MakeMediaContainerType(aType);
  if (!containerType) {
    return;
  }

  const MediaMIMEType& mimeType = containerType->Type();
  if (mimeType == MEDIAMIMETYPE(VIDEO_WEBM)) {
    AccumulateCategorical(
        mozilla::Telemetry::LABELS_MSE_SOURCE_BUFFER_TYPE::VideoWebm);
  } else if (mimeType == MEDIAMIMETYPE(AUDIO_WEBM)) {
    AccumulateCategorical(
        mozilla::Telemetry::LABELS_MSE_SOURCE_BUFFER_TYPE::AudioWebm);
  } else if (mimeType == MEDIAMIMETYPE(VIDEO_MP4)) {
    AccumulateCategorical(
        mozilla::Telemetry::LABELS_MSE_SOURCE_BUFFER_TYPE::VideoMp4);
  } else if (mimeType == MEDIAMIMETYPE(AUDIO_MP4)) {
    AccumulateCategorical(
        mozilla::Telemetry::LABELS_MSE_SOURCE_BUFFER_TYPE::AudioMp4);
  } else if (mimeType == MEDIAMIMETYPE(VIDEO_MPEG_TS)) {
    AccumulateCategorical(
        mozilla::Telemetry::LABELS_MSE_SOURCE_BUFFER_TYPE::VideoMp2t);
  } else if (mimeType == MEDIAMIMETYPE(AUDIO_MPEG_TS)) {
    AccumulateCategorical(
        mozilla::Telemetry::LABELS_MSE_SOURCE_BUFFER_TYPE::AudioMp2t);
  } else if (mimeType == MEDIAMIMETYPE(AUDIO_MP3)) {
    AccumulateCategorical(
        mozilla::Telemetry::LABELS_MSE_SOURCE_BUFFER_TYPE::AudioMpeg);
  } else if (mimeType == MEDIAMIMETYPE(AUDIO_AAC)) {
    AccumulateCategorical(
        mozilla::Telemetry::LABELS_MSE_SOURCE_BUFFER_TYPE::AudioAac);
  }
}
+
/* static */
// Implements the MediaSource.isTypeSupported() check (also used by
// addSourceBuffer). Reports failure by throwing on aRv — TypeError for an
// empty type, NotSupportedError otherwise — so callers surface the right
// exception; success leaves aRv untouched.
void MediaSource::IsTypeSupported(const nsAString& aType,
                                  DecoderDoctorDiagnostics* aDiagnostics,
                                  ErrorResult& aRv) {
  if (aType.IsEmpty()) {
    return aRv.ThrowTypeError("Empty type");
  }

  Maybe<MediaContainerType> containerType = MakeMediaContainerType(aType);
  if (!containerType) {
    return aRv.ThrowNotSupportedError("Unknown type");
  }

  // First gate: can the platform decode this container/codec combination at
  // all, independent of MSE?
  if (DecoderTraits::CanHandleContainerType(*containerType, aDiagnostics) ==
      CANPLAY_NO) {
    return aRv.ThrowNotSupportedError("Can't play type");
  }

  // VP9 is additionally gated behind its own pref below, unless IsVP9Forced()
  // overrides the pref (e.g. no MP4/hardware support).
  bool hasVP9 = false;
  const MediaCodecs& codecs = containerType->ExtendedType().Codecs();
  for (const auto& codec : codecs.Range()) {
    if (IsVP9CodecString(codec)) {
      hasVP9 = true;
      break;
    }
  }

  // Now we know that this media type could be played.
  // MediaSource imposes extra restrictions, and some prefs.
  const MediaMIMEType& mimeType = containerType->Type();
  if (mimeType == MEDIAMIMETYPE("video/mp4") ||
      mimeType == MEDIAMIMETYPE("audio/mp4")) {
    if (!StaticPrefs::media_mediasource_mp4_enabled()) {
      // Don't leak information about the fact that it's pref-disabled; just act
      // like we can't play it. Or should this throw "Unknown type"?
      return aRv.ThrowNotSupportedError("Can't play type");
    }
    if (!StaticPrefs::media_mediasource_vp9_enabled() && hasVP9 &&
        !IsVP9Forced(aDiagnostics)) {
      // Don't leak information about the fact that it's pref-disabled; just act
      // like we can't play it. Or should this throw "Unknown type"?
      return aRv.ThrowNotSupportedError("Can't play type");
    }

    return;
  }
  if (mimeType == MEDIAMIMETYPE("video/webm")) {
    if (!StaticPrefs::media_mediasource_webm_enabled()) {
      // Don't leak information about the fact that it's pref-disabled; just act
      // like we can't play it. Or should this throw "Unknown type"?
      return aRv.ThrowNotSupportedError("Can't play type");
    }
    if (!StaticPrefs::media_mediasource_vp9_enabled() && hasVP9 &&
        !IsVP9Forced(aDiagnostics)) {
      // Don't leak information about the fact that it's pref-disabled; just act
      // like we can't play it. Or should this throw "Unknown type"?
      return aRv.ThrowNotSupportedError("Can't play type");
    }
    return;
  }
  if (mimeType == MEDIAMIMETYPE("audio/webm")) {
    // Audio-only WebM has its own enabling pref in addition to the general
    // WebM one.
    if (!(StaticPrefs::media_mediasource_webm_enabled() ||
          StaticPrefs::media_mediasource_webm_audio_enabled())) {
      // Don't leak information about the fact that it's pref-disabled; just act
      // like we can't play it. Or should this throw "Unknown type"?
      return aRv.ThrowNotSupportedError("Can't play type");
    }
    return;
  }

  // Playable by the platform, but not one of the byte streams MSE supports.
  return aRv.ThrowNotSupportedError("Type not supported in MediaSource");
}
+
+/* static */
+already_AddRefed<MediaSource> MediaSource::Constructor(
+ const GlobalObject& aGlobal, ErrorResult& aRv) {
+ nsCOMPtr<nsPIDOMWindowInner> window =
+ do_QueryInterface(aGlobal.GetAsSupports());
+ if (!window) {
+ aRv.Throw(NS_ERROR_UNEXPECTED);
+ return nullptr;
+ }
+
+ RefPtr<MediaSource> mediaSource = new MediaSource(window);
+ return mediaSource.forget();
+}
+
MediaSource::~MediaSource() {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("");
  // Break the decoder's back-pointer to us before we go away.
  if (mDecoder) {
    mDecoder->DetachMediaSource();
  }
}
+
// Returns the list of all SourceBuffers attached to this MediaSource.
// The list is expected to be empty whenever readyState is "closed".
SourceBufferList* MediaSource::SourceBuffers() {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT_IF(mReadyState == MediaSourceReadyState::Closed,
                mSourceBuffers->IsEmpty());
  return mSourceBuffers;
}
+
// Returns the subset of SourceBuffers currently contributing to playback.
// The list is expected to be empty whenever readyState is "closed".
SourceBufferList* MediaSource::ActiveSourceBuffers() {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT_IF(mReadyState == MediaSourceReadyState::Closed,
                mActiveSourceBuffers->IsEmpty());
  return mActiveSourceBuffers;
}
+
// Current readyState ("closed", "open" or "ended"); main-thread only.
MediaSourceReadyState MediaSource::ReadyState() {
  MOZ_ASSERT(NS_IsMainThread());
  return mReadyState;
}
+
// WebIDL duration getter: NaN while closed, otherwise mirrored from the
// decoder.
double MediaSource::Duration() {
  MOZ_ASSERT(NS_IsMainThread());
  if (mReadyState == MediaSourceReadyState::Closed) {
    return UnspecifiedNaN<double>();
  }
  MOZ_ASSERT(mDecoder);
  return mDecoder->GetDuration();
}
+
// WebIDL duration setter. Throws TypeError for negative or NaN values, and
// InvalidStateError unless readyState is "open" with no SourceBuffer
// updating; otherwise runs the duration change algorithm.
void MediaSource::SetDuration(double aDuration, ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  if (aDuration < 0 || std::isnan(aDuration)) {
    nsPrintfCString error("Invalid duration value %f", aDuration);
    MSE_API("SetDuration(aDuration=%f, invalid value)", aDuration);
    aRv.ThrowTypeError(error);
    return;
  }
  if (mReadyState != MediaSourceReadyState::Open ||
      mSourceBuffers->AnyUpdating()) {
    MSE_API("SetDuration(aDuration=%f, invalid state)", aDuration);
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  DurationChange(aDuration, aRv);
  MSE_API("SetDuration(aDuration=%f, errorCode=%d)", aDuration,
          aRv.ErrorCodeAsInt());
}
+
// Internal overload: forwards the new duration straight to the decoder,
// bypassing the WebIDL state/validity checks of the double overload above.
void MediaSource::SetDuration(const media::TimeUnit& aDuration) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("SetDuration(aDuration=%f)", aDuration.ToSeconds());
  mDecoder->SetMediaSourceDuration(aDuration);
}
+
// WebIDL addSourceBuffer(). Check order: type support (throws via aRv),
// then the buffer-count quota, then readyState must be "open".
already_AddRefed<SourceBuffer> MediaSource::AddSourceBuffer(
    const nsAString& aType, ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  DecoderDoctorDiagnostics diagnostics;
  IsTypeSupported(aType, &diagnostics, aRv);
  RecordTypeForTelemetry(aType, GetOwner());
  // IsTypeSupported reports failure by throwing on aRv.
  bool supported = !aRv.Failed();
  diagnostics.StoreFormatDiagnostics(
      GetOwner() ? GetOwner()->GetExtantDoc() : nullptr, aType, supported,
      __func__);
  MSE_API("AddSourceBuffer(aType=%s)%s", NS_ConvertUTF16toUTF8(aType).get(),
          supported ? "" : " [not supported]");
  if (!supported) {
    // aRv already carries the NotSupportedError/TypeError.
    return nullptr;
  }
  if (mSourceBuffers->Length() >= MAX_SOURCE_BUFFERS) {
    aRv.Throw(NS_ERROR_DOM_MEDIA_SOURCE_MAX_BUFFER_QUOTA_EXCEEDED_ERR);
    return nullptr;
  }
  if (mReadyState != MediaSourceReadyState::Open) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return nullptr;
  }
  Maybe<MediaContainerType> containerType = MakeMediaContainerType(aType);
  if (!containerType) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }
  RefPtr<SourceBuffer> sourceBuffer = new SourceBuffer(this, *containerType);
  mSourceBuffers->Append(sourceBuffer);
  DDLINKCHILD("sourcebuffer[]", sourceBuffer.get());
  MSE_DEBUG("sourceBuffer=%p", sourceBuffer.get());
  return sourceBuffer.forget();
}
+
+RefPtr<MediaSource::ActiveCompletionPromise> MediaSource::SourceBufferIsActive(
+ SourceBuffer* aSourceBuffer) {
+ MOZ_ASSERT(NS_IsMainThread());
+ mActiveSourceBuffers->ClearSimple();
+ bool initMissing = false;
+ bool found = false;
+ for (uint32_t i = 0; i < mSourceBuffers->Length(); i++) {
+ SourceBuffer* sourceBuffer = mSourceBuffers->IndexedGetter(i, found);
+ MOZ_ALWAYS_TRUE(found);
+ if (sourceBuffer == aSourceBuffer) {
+ mActiveSourceBuffers->Append(aSourceBuffer);
+ } else if (sourceBuffer->IsActive()) {
+ mActiveSourceBuffers->AppendSimple(sourceBuffer);
+ } else {
+ // Some source buffers haven't yet received an init segment.
+ // There's nothing more we can do at this stage.
+ initMissing = true;
+ }
+ }
+ if (initMissing || !mDecoder) {
+ return ActiveCompletionPromise::CreateAndResolve(true, __func__);
+ }
+
+ mDecoder->NotifyInitDataArrived();
+
+ // Add our promise to the queue.
+ // It will be resolved once the HTMLMediaElement modifies its readyState.
+ MozPromiseHolder<ActiveCompletionPromise> holder;
+ RefPtr<ActiveCompletionPromise> promise = holder.Ensure(__func__);
+ mCompletionPromises.AppendElement(std::move(holder));
+ return promise;
+}
+
+void MediaSource::CompletePendingTransactions() {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_DEBUG("Resolving %u promises", unsigned(mCompletionPromises.Length()));
+ for (auto& promise : mCompletionPromises) {
+ promise.Resolve(true, __func__);
+ }
+ mCompletionPromises.Clear();
+}
+
+void MediaSource::RemoveSourceBuffer(SourceBuffer& aSourceBuffer,
+ ErrorResult& aRv) {
+ MOZ_ASSERT(NS_IsMainThread());
+ SourceBuffer* sourceBuffer = &aSourceBuffer;
+ MSE_API("RemoveSourceBuffer(aSourceBuffer=%p)", sourceBuffer);
+ if (!mSourceBuffers->Contains(sourceBuffer)) {
+ aRv.Throw(NS_ERROR_DOM_NOT_FOUND_ERR);
+ return;
+ }
+
+ sourceBuffer->AbortBufferAppend();
+ // TODO:
+ // abort stream append loop (if running)
+
+ // TODO:
+ // For all sourceBuffer audioTracks, videoTracks, textTracks:
+ // set sourceBuffer to null
+ // remove sourceBuffer video, audio, text Tracks from MediaElement tracks
+ // remove sourceBuffer video, audio, text Tracks and fire "removetrack" at
+ // affected lists fire "removetrack" at modified MediaElement track lists
+ // If removed enabled/selected, fire "change" at affected MediaElement list.
+ if (mActiveSourceBuffers->Contains(sourceBuffer)) {
+ mActiveSourceBuffers->Remove(sourceBuffer);
+ }
+ mSourceBuffers->Remove(sourceBuffer);
+ DDUNLINKCHILD(sourceBuffer);
+ // TODO: Free all resources associated with sourceBuffer
+}
+
+void MediaSource::EndOfStream(
+ const Optional<MediaSourceEndOfStreamError>& aError, ErrorResult& aRv) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_API("EndOfStream(aError=%d)",
+ aError.WasPassed() ? uint32_t(aError.Value()) : 0);
+ if (mReadyState != MediaSourceReadyState::Open ||
+ mSourceBuffers->AnyUpdating()) {
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+
+ SetReadyState(MediaSourceReadyState::Ended);
+ mSourceBuffers->Ended();
+ if (!aError.WasPassed()) {
+ DurationChange(mSourceBuffers->GetHighestBufferedEndTime().ToBase(1000000),
+ aRv);
+ // Notify reader that all data is now available.
+ mDecoder->Ended(true);
+ return;
+ }
+ switch (aError.Value()) {
+ case MediaSourceEndOfStreamError::Network:
+ mDecoder->NetworkError(MediaResult(NS_ERROR_FAILURE, "MSE network"));
+ break;
+ case MediaSourceEndOfStreamError::Decode:
+ mDecoder->DecodeError(NS_ERROR_DOM_MEDIA_FATAL_ERR);
+ break;
+ default:
+ MOZ_ASSERT_UNREACHABLE(
+ "Someone added a MediaSourceReadyState value and didn't handle it "
+ "here");
+ break;
+ }
+}
+
+void MediaSource::EndOfStream(const MediaResult& aError) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_API("EndOfStream(aError=%s)", aError.ErrorName().get());
+
+ SetReadyState(MediaSourceReadyState::Ended);
+ mSourceBuffers->Ended();
+ mDecoder->DecodeError(aError);
+}
+
+/* static */
+bool MediaSource::IsTypeSupported(const GlobalObject& aOwner,
+ const nsAString& aType) {
+ MOZ_ASSERT(NS_IsMainThread());
+ DecoderDoctorDiagnostics diagnostics;
+ IgnoredErrorResult rv;
+ IsTypeSupported(aType, &diagnostics, rv);
+ bool supported = !rv.Failed();
+ nsCOMPtr<nsPIDOMWindowInner> window =
+ do_QueryInterface(aOwner.GetAsSupports());
+ RecordTypeForTelemetry(aType, window);
+ diagnostics.StoreFormatDiagnostics(window ? window->GetExtantDoc() : nullptr,
+ aType, supported, __func__);
+ MOZ_LOG(GetMediaSourceAPILog(), mozilla::LogLevel::Debug,
+ ("MediaSource::%s: IsTypeSupported(aType=%s) %s", __func__,
+ NS_ConvertUTF16toUTF8(aType).get(),
+ supported ? "OK" : "[not supported]"));
+ return supported;
+}
+
+void MediaSource::SetLiveSeekableRange(double aStart, double aEnd,
+ ErrorResult& aRv) {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ // 1. If the readyState attribute is not "open" then throw an
+ // InvalidStateError exception and abort these steps.
+ if (mReadyState != MediaSourceReadyState::Open) {
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+
+ // 2. If start is negative or greater than end, then throw a TypeError
+ // exception and abort these steps.
+ if (aStart < 0 || aStart > aEnd) {
+ aRv.ThrowTypeError("Invalid start value");
+ return;
+ }
+
+ // 3. Set live seekable range to be a new normalized TimeRanges object
+ // containing a single range whose start position is start and end position is
+ // end.
+ mLiveSeekableRange = Some(media::TimeRanges(media::TimeRange(aStart, aEnd)));
+}
+
+void MediaSource::ClearLiveSeekableRange(ErrorResult& aRv) {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ // 1. If the readyState attribute is not "open" then throw an
+ // InvalidStateError exception and abort these steps.
+ if (mReadyState != MediaSourceReadyState::Open) {
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+
+ // 2. If live seekable range contains a range, then set live seekable range to
+ // be a new empty TimeRanges object.
+ mLiveSeekableRange.reset();
+}
+
+bool MediaSource::Attach(MediaSourceDecoder* aDecoder) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_DEBUG("Attach(aDecoder=%p) owner=%p", aDecoder, aDecoder->GetOwner());
+ MOZ_ASSERT(aDecoder);
+ MOZ_ASSERT(aDecoder->GetOwner());
+ if (mReadyState != MediaSourceReadyState::Closed) {
+ return false;
+ }
+ MOZ_ASSERT(!mMediaElement);
+ mMediaElement = aDecoder->GetOwner()->GetMediaElement();
+ MOZ_ASSERT(!mDecoder);
+ mDecoder = aDecoder;
+ mDecoder->AttachMediaSource(this);
+ SetReadyState(MediaSourceReadyState::Open);
+ return true;
+}
+
+void MediaSource::Detach() {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_RELEASE_ASSERT(mCompletionPromises.IsEmpty());
+ MSE_DEBUG("mDecoder=%p owner=%p", mDecoder.get(),
+ mDecoder ? mDecoder->GetOwner() : nullptr);
+ if (!mDecoder) {
+ MOZ_ASSERT(mReadyState == MediaSourceReadyState::Closed);
+ MOZ_ASSERT(mActiveSourceBuffers->IsEmpty() && mSourceBuffers->IsEmpty());
+ return;
+ }
+ mMediaElement = nullptr;
+ SetReadyState(MediaSourceReadyState::Closed);
+ if (mActiveSourceBuffers) {
+ mActiveSourceBuffers->Clear();
+ }
+ if (mSourceBuffers) {
+ mSourceBuffers->Clear();
+ }
+ mDecoder->DetachMediaSource();
+ mDecoder = nullptr;
+}
+
+MediaSource::MediaSource(nsPIDOMWindowInner* aWindow)
+ : DOMEventTargetHelper(aWindow),
+ mDecoder(nullptr),
+ mPrincipal(nullptr),
+ mAbstractMainThread(
+ GetOwnerGlobal()->AbstractMainThreadFor(TaskCategory::Other)),
+ mReadyState(MediaSourceReadyState::Closed) {
+ MOZ_ASSERT(NS_IsMainThread());
+ mSourceBuffers = new SourceBufferList(this);
+ mActiveSourceBuffers = new SourceBufferList(this);
+
+ nsCOMPtr<nsIScriptObjectPrincipal> sop = do_QueryInterface(aWindow);
+ if (sop) {
+ mPrincipal = sop->GetPrincipal();
+ }
+
+ MSE_API("MediaSource(aWindow=%p) mSourceBuffers=%p mActiveSourceBuffers=%p",
+ aWindow, mSourceBuffers.get(), mActiveSourceBuffers.get());
+}
+
+void MediaSource::SetReadyState(MediaSourceReadyState aState) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_ASSERT(aState != mReadyState);
+ MSE_DEBUG("SetReadyState(aState=%" PRIu32 ") mReadyState=%" PRIu32,
+ static_cast<uint32_t>(aState), static_cast<uint32_t>(mReadyState));
+
+ MediaSourceReadyState oldState = mReadyState;
+ mReadyState = aState;
+
+ if (mReadyState == MediaSourceReadyState::Open &&
+ (oldState == MediaSourceReadyState::Closed ||
+ oldState == MediaSourceReadyState::Ended)) {
+ QueueAsyncSimpleEvent("sourceopen");
+ if (oldState == MediaSourceReadyState::Ended) {
+ // Notify reader that more data may come.
+ mDecoder->Ended(false);
+ }
+ return;
+ }
+
+ if (mReadyState == MediaSourceReadyState::Ended &&
+ oldState == MediaSourceReadyState::Open) {
+ QueueAsyncSimpleEvent("sourceended");
+ return;
+ }
+
+ if (mReadyState == MediaSourceReadyState::Closed &&
+ (oldState == MediaSourceReadyState::Open ||
+ oldState == MediaSourceReadyState::Ended)) {
+ QueueAsyncSimpleEvent("sourceclose");
+ return;
+ }
+
+ NS_WARNING("Invalid MediaSource readyState transition");
+}
+
+void MediaSource::DispatchSimpleEvent(const char* aName) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_API("Dispatch event '%s'", aName);
+ DispatchTrustedEvent(NS_ConvertUTF8toUTF16(aName));
+}
+
+void MediaSource::QueueAsyncSimpleEvent(const char* aName) {
+ MSE_DEBUG("Queuing event '%s'", aName);
+ nsCOMPtr<nsIRunnable> event = new AsyncEventRunner<MediaSource>(this, aName);
+ mAbstractMainThread->Dispatch(event.forget());
+}
+
+void MediaSource::DurationChange(const media::TimeUnit& aNewDuration,
+ ErrorResult& aRv) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_DEBUG("DurationChange(aNewDuration=%s)", aNewDuration.ToString().get());
+
+ // 1. If the current value of duration is equal to new duration, then return.
+ if (mDecoder->GetDuration() == aNewDuration.ToSeconds()) {
+ return;
+ }
+
+ // 2. If new duration is less than the highest starting presentation timestamp
+ // of any buffered coded frames for all SourceBuffer objects in sourceBuffers,
+ // then throw an InvalidStateError exception and abort these steps.
+ if (aNewDuration < mSourceBuffers->HighestStartTime()) {
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+
+ // 3. Let highest end time be the largest track buffer ranges end time across
+ // all the track buffers across all SourceBuffer objects in sourceBuffers.
+ media::TimeUnit highestEndTime = mSourceBuffers->HighestEndTime();
+ // 4. If new duration is less than highest end time, then
+ // 4.1 Update new duration to equal highest end time.
+ media::TimeUnit newDuration = std::max(aNewDuration, highestEndTime);
+
+ // 5. Update the media duration to new duration and run the HTMLMediaElement
+ // duration change algorithm.
+ mDecoder->SetMediaSourceDuration(newDuration);
+}
+
+void MediaSource::DurationChange(double aNewDuration, ErrorResult& aRv) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_DEBUG("DurationChange(aNewDuration=%f)", aNewDuration);
+
+ // 1. If the current value of duration is equal to new duration, then return.
+ if (mDecoder->GetDuration() == aNewDuration) {
+ return;
+ }
+
+ // 2. If new duration is less than the highest starting presentation timestamp
+ // of any buffered coded frames for all SourceBuffer objects in sourceBuffers,
+ // then throw an InvalidStateError exception and abort these steps.
+ if (aNewDuration < mSourceBuffers->HighestStartTime().ToSeconds()) {
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+
+ // 3. Let highest end time be the largest track buffer ranges end time across
+ // all the track buffers across all SourceBuffer objects in sourceBuffers.
+ double highestEndTime = mSourceBuffers->HighestEndTime().ToSeconds();
+ // 4. If new duration is less than highest end time, then
+ // 4.1 Update new duration to equal highest end time.
+ double newDuration = std::max(aNewDuration, highestEndTime);
+
+ // 5. Update the media duration to new duration and run the HTMLMediaElement
+ // duration change algorithm.
+ mDecoder->SetMediaSourceDuration(newDuration);
+}
+
+already_AddRefed<Promise> MediaSource::MozDebugReaderData(ErrorResult& aRv) {
+ // Creating a JS promise
+ nsPIDOMWindowInner* win = GetOwner();
+ if (!win) {
+ aRv.Throw(NS_ERROR_UNEXPECTED);
+ return nullptr;
+ }
+ RefPtr<Promise> domPromise = Promise::Create(win->AsGlobal(), aRv);
+ if (NS_WARN_IF(aRv.Failed())) {
+ return nullptr;
+ }
+ MOZ_ASSERT(domPromise);
+ UniquePtr<MediaSourceDecoderDebugInfo> info =
+ MakeUnique<MediaSourceDecoderDebugInfo>();
+ mDecoder->RequestDebugInfo(*info)->Then(
+ mAbstractMainThread, __func__,
+ [domPromise, infoPtr = std::move(info)] {
+ domPromise->MaybeResolve(infoPtr.get());
+ },
+ [] {
+ MOZ_ASSERT_UNREACHABLE("Unexpected rejection while getting debug data");
+ });
+
+ return domPromise.forget();
+}
+
+nsPIDOMWindowInner* MediaSource::GetParentObject() const { return GetOwner(); }
+
+JSObject* MediaSource::WrapObject(JSContext* aCx,
+ JS::Handle<JSObject*> aGivenProto) {
+ return MediaSource_Binding::Wrap(aCx, this, aGivenProto);
+}
+
+NS_IMPL_CYCLE_COLLECTION_INHERITED(MediaSource, DOMEventTargetHelper,
+ mMediaElement, mSourceBuffers,
+ mActiveSourceBuffers)
+
+NS_IMPL_ADDREF_INHERITED(MediaSource, DOMEventTargetHelper)
+NS_IMPL_RELEASE_INHERITED(MediaSource, DOMEventTargetHelper)
+
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(MediaSource)
+ NS_INTERFACE_MAP_ENTRY_CONCRETE(mozilla::dom::MediaSource)
+NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
+
+#undef MSE_DEBUG
+#undef MSE_API
+
+} // namespace dom
+
+} // namespace mozilla
diff --git a/dom/media/mediasource/MediaSource.h b/dom/media/mediasource/MediaSource.h
new file mode 100644
index 0000000000..e0f90e8416
--- /dev/null
+++ b/dom/media/mediasource/MediaSource.h
@@ -0,0 +1,182 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_dom_MediaSource_h_
+#define mozilla_dom_MediaSource_h_
+
+#include "MediaSourceDecoder.h"
+#include "js/RootingAPI.h"
+#include "mozilla/Assertions.h"
+#include "mozilla/Attributes.h"
+#include "mozilla/DOMEventTargetHelper.h"
+#include "mozilla/MozPromise.h"
+#include "mozilla/dom/MediaSourceBinding.h"
+#include "nsCOMPtr.h"
+#include "nsCycleCollectionNoteChild.h"
+#include "nsCycleCollectionParticipant.h"
+#include "nsID.h"
+#include "nsISupports.h"
+#include "nscore.h"
+#include "TimeUnits.h"
+
+struct JSContext;
+class JSObject;
+class nsPIDOMWindowInner;
+
+namespace mozilla {
+
+class AbstractThread;
+class ErrorResult;
+template <typename T>
+class AsyncEventRunner;
+class MediaResult;
+
+namespace dom {
+class MediaSource;
+} // namespace dom
+DDLoggedTypeName(dom::MediaSource);
+
+namespace dom {
+
+class GlobalObject;
+class SourceBuffer;
+class SourceBufferList;
+template <typename T>
+class Optional;
+
+#define MOZILLA_DOM_MEDIASOURCE_IMPLEMENTATION_IID \
+ { \
+ 0x3839d699, 0x22c5, 0x439f, { \
+ 0x94, 0xca, 0x0e, 0x0b, 0x26, 0xf9, 0xca, 0xbf \
+ } \
+ }
+
+class MediaSource final : public DOMEventTargetHelper,
+ public DecoderDoctorLifeLogger<MediaSource> {
+ public:
+ /** WebIDL Methods. */
+ static already_AddRefed<MediaSource> Constructor(const GlobalObject& aGlobal,
+ ErrorResult& aRv);
+
+ SourceBufferList* SourceBuffers();
+ SourceBufferList* ActiveSourceBuffers();
+ MediaSourceReadyState ReadyState();
+
+ double Duration();
+ void SetDuration(double aDuration, ErrorResult& aRv);
+
+ already_AddRefed<SourceBuffer> AddSourceBuffer(const nsAString& aType,
+ ErrorResult& aRv);
+ void RemoveSourceBuffer(SourceBuffer& aSourceBuffer, ErrorResult& aRv);
+
+ void EndOfStream(const Optional<MediaSourceEndOfStreamError>& aError,
+ ErrorResult& aRv);
+ void EndOfStream(const MediaResult& aError);
+
+ void SetLiveSeekableRange(double aStart, double aEnd, ErrorResult& aRv);
+ void ClearLiveSeekableRange(ErrorResult& aRv);
+
+ static bool IsTypeSupported(const GlobalObject&, const nsAString& aType);
+ // Throws on aRv if not supported.
+ static void IsTypeSupported(const nsAString& aType,
+ DecoderDoctorDiagnostics* aDiagnostics,
+ ErrorResult& aRv);
+
+ IMPL_EVENT_HANDLER(sourceopen);
+ IMPL_EVENT_HANDLER(sourceended);
+ IMPL_EVENT_HANDLER(sourceclose);
+
+ /** End WebIDL Methods. */
+
+ NS_DECL_ISUPPORTS_INHERITED
+ NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaSource, DOMEventTargetHelper)
+ NS_DECLARE_STATIC_IID_ACCESSOR(MOZILLA_DOM_MEDIASOURCE_IMPLEMENTATION_IID)
+
+ nsPIDOMWindowInner* GetParentObject() const;
+
+ JSObject* WrapObject(JSContext* aCx,
+ JS::Handle<JSObject*> aGivenProto) override;
+
+ // Attach this MediaSource to Decoder aDecoder. Returns false if already
+ // attached.
+ bool Attach(MediaSourceDecoder* aDecoder);
+ void Detach();
+
+ // Set mReadyState to aState and fire the required events at the MediaSource.
+ void SetReadyState(MediaSourceReadyState aState);
+
+ // Used by SourceBuffer to call CreateSubDecoder.
+ MediaSourceDecoder* GetDecoder() { return mDecoder; }
+
+ nsIPrincipal* GetPrincipal() { return mPrincipal; }
+
+ // Returns a structure describing the state of the MediaSource internal
+ // buffered data. Used for debugging purposes.
+ already_AddRefed<Promise> MozDebugReaderData(ErrorResult& aRv);
+
+ bool HasLiveSeekableRange() const { return mLiveSeekableRange.isSome(); }
+ media::TimeRanges LiveSeekableRange() const {
+ return mLiveSeekableRange.value();
+ }
+
+ AbstractThread* AbstractMainThread() const { return mAbstractMainThread; }
+
+ // Resolve all CompletionPromise pending.
+ void CompletePendingTransactions();
+
+ private:
+ // SourceBuffer uses SetDuration and SourceBufferIsActive
+ friend class mozilla::dom::SourceBuffer;
+
+ ~MediaSource();
+
+ explicit MediaSource(nsPIDOMWindowInner* aWindow);
+
+ friend class AsyncEventRunner<MediaSource>;
+ void DispatchSimpleEvent(const char* aName);
+ void QueueAsyncSimpleEvent(const char* aName);
+
+ void DurationChange(const media::TimeUnit& aNewDuration, ErrorResult& aRv);
+ void DurationChange(double aNewDuration, ErrorResult& aRv);
+
+ // SetDuration with no checks.
+ void SetDuration(const media::TimeUnit& aDuration);
+
+ typedef MozPromise<bool, MediaResult, /* IsExclusive = */ true>
+ ActiveCompletionPromise;
+ // Mark SourceBuffer as active and rebuild ActiveSourceBuffers.
+ // Return a MozPromise that will be resolved once all related operations are
+ // completed, or can't progress any further.
+ // Such as, transition of readyState from HAVE_NOTHING to HAVE_METADATA.
+ RefPtr<ActiveCompletionPromise> SourceBufferIsActive(
+ SourceBuffer* aSourceBuffer);
+
+ RefPtr<SourceBufferList> mSourceBuffers;
+ RefPtr<SourceBufferList> mActiveSourceBuffers;
+
+ RefPtr<MediaSourceDecoder> mDecoder;
+ // Ensures the media element remains alive to dispatch progress and
+ // durationchanged events.
+ RefPtr<HTMLMediaElement> mMediaElement;
+
+ RefPtr<nsIPrincipal> mPrincipal;
+
+ const RefPtr<AbstractThread> mAbstractMainThread;
+
+ MediaSourceReadyState mReadyState;
+
+ Maybe<media::TimeRanges> mLiveSeekableRange;
+ nsTArray<MozPromiseHolder<ActiveCompletionPromise>> mCompletionPromises;
+};
+
+NS_DEFINE_STATIC_IID_ACCESSOR(MediaSource,
+ MOZILLA_DOM_MEDIASOURCE_IMPLEMENTATION_IID)
+
+} // namespace dom
+
+} // namespace mozilla
+
+#endif /* mozilla_dom_MediaSource_h_ */
diff --git a/dom/media/mediasource/MediaSourceDecoder.cpp b/dom/media/mediasource/MediaSourceDecoder.cpp
new file mode 100644
index 0000000000..24a74e261b
--- /dev/null
+++ b/dom/media/mediasource/MediaSourceDecoder.cpp
@@ -0,0 +1,372 @@
+/* -*- mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+#include "MediaSourceDecoder.h"
+
+#include "base/process_util.h"
+#include "mozilla/Logging.h"
+#include "ExternalEngineStateMachine.h"
+#include "MediaDecoder.h"
+#include "MediaDecoderStateMachine.h"
+#include "MediaShutdownManager.h"
+#include "MediaSource.h"
+#include "MediaSourceDemuxer.h"
+#include "MediaSourceUtils.h"
+#include "SourceBuffer.h"
+#include "SourceBufferList.h"
+#include "VideoUtils.h"
+#include <algorithm>
+
+extern mozilla::LogModule* GetMediaSourceLog();
+
+#define MSE_DEBUG(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceLog(), mozilla::LogLevel::Debug, "::%s: " arg, \
+ __func__, ##__VA_ARGS__)
+#define MSE_DEBUGV(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceLog(), mozilla::LogLevel::Verbose, "::%s: " arg, \
+ __func__, ##__VA_ARGS__)
+
+using namespace mozilla::media;
+
+namespace mozilla {
+
+MediaSourceDecoder::MediaSourceDecoder(MediaDecoderInit& aInit)
+ : MediaDecoder(aInit), mMediaSource(nullptr), mEnded(false) {
+ mExplicitDuration.emplace(UnspecifiedNaN<double>());
+}
+
+MediaDecoderStateMachineBase* MediaSourceDecoder::CreateStateMachine(
+ bool aDisableExternalEngine) {
+ MOZ_ASSERT(NS_IsMainThread());
+ // if `mDemuxer` already exists, that means we're in the process of recreating
+ // the state machine. The track buffers are tied to the demuxer so we would
+ // need to reuse it.
+ if (!mDemuxer) {
+ mDemuxer = new MediaSourceDemuxer(AbstractMainThread());
+ }
+ MediaFormatReaderInit init;
+ init.mVideoFrameContainer = GetVideoFrameContainer();
+ init.mKnowsCompositor = GetCompositor();
+ init.mCrashHelper = GetOwner()->CreateGMPCrashHelper();
+ init.mFrameStats = mFrameStats;
+ init.mMediaDecoderOwnerID = mOwner;
+ static Atomic<uint32_t> sTrackingIdCounter(0);
+ init.mTrackingId.emplace(TrackingId::Source::MSEDecoder, sTrackingIdCounter++,
+ TrackingId::TrackAcrossProcesses::Yes);
+ mReader = new MediaFormatReader(init, mDemuxer);
+#ifdef MOZ_WMF_MEDIA_ENGINE
+ // TODO : Only for testing development for now. In the future this should be
+ // used for encrypted content only.
+ if (StaticPrefs::media_wmf_media_engine_enabled() &&
+ !aDisableExternalEngine) {
+ return new ExternalEngineStateMachine(this, mReader);
+ }
+#endif
+ return new MediaDecoderStateMachine(this, mReader);
+}
+
+nsresult MediaSourceDecoder::Load(nsIPrincipal* aPrincipal) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_ASSERT(!GetStateMachine());
+
+ mPrincipal = aPrincipal;
+
+ nsresult rv = MediaShutdownManager::Instance().Register(this);
+ if (NS_WARN_IF(NS_FAILED(rv))) {
+ return rv;
+ }
+ return CreateAndInitStateMachine(!mEnded);
+}
+
+template <typename IntervalType>
+IntervalType MediaSourceDecoder::GetSeekableImpl() {
+ MOZ_ASSERT(NS_IsMainThread());
+ if (!mMediaSource) {
+ NS_WARNING("MediaSource element isn't attached");
+ return IntervalType();
+ }
+
+ TimeIntervals seekable;
+ double duration = mMediaSource->Duration();
+ if (std::isnan(duration)) {
+ // Return empty range.
+ } else if (duration > 0 && std::isinf(duration)) {
+ media::TimeIntervals buffered = GetBuffered();
+
+ // 1. If live seekable range is not empty:
+ if (mMediaSource->HasLiveSeekableRange()) {
+ // 1. Let union ranges be the union of live seekable range and the
+ // HTMLMediaElement.buffered attribute.
+ TimeRanges unionRanges =
+ media::TimeRanges(buffered) + mMediaSource->LiveSeekableRange();
+ // 2. Return a single range with a start time equal to the earliest start
+ // time in union ranges and an end time equal to the highest end time in
+ // union ranges and abort these steps.
+ if constexpr (std::is_same<IntervalType, TimeRanges>::value) {
+ TimeRanges seekableRange = media::TimeRanges(
+ TimeRange(unionRanges.GetStart(), unionRanges.GetEnd()));
+ return seekableRange;
+ } else {
+ MOZ_RELEASE_ASSERT(false);
+ }
+ }
+
+ if (!buffered.IsEmpty()) {
+ seekable += media::TimeInterval(TimeUnit::Zero(), buffered.GetEnd());
+ }
+ } else {
+ if constexpr (std::is_same<IntervalType, TimeRanges>::value) {
+ // Common case: seekable in entire range of the media.
+ return TimeRanges(TimeRange(0, duration));
+ } else if constexpr (std::is_same<IntervalType, TimeIntervals>::value) {
+ seekable += media::TimeInterval(TimeUnit::Zero(),
+ mDuration.match(DurationToTimeUnit()));
+ } else {
+ MOZ_RELEASE_ASSERT(false);
+ }
+ }
+ MSE_DEBUG("ranges=%s", DumpTimeRanges(seekable).get());
+ return IntervalType(seekable);
+}
+
+media::TimeIntervals MediaSourceDecoder::GetSeekable() {
+ return GetSeekableImpl<media::TimeIntervals>();
+}
+
+media::TimeRanges MediaSourceDecoder::GetSeekableTimeRanges() {
+ return GetSeekableImpl<media::TimeRanges>();
+}
+
+media::TimeIntervals MediaSourceDecoder::GetBuffered() {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ if (!mMediaSource) {
+ NS_WARNING("MediaSource element isn't attached");
+ return media::TimeIntervals::Invalid();
+ }
+ dom::SourceBufferList* sourceBuffers = mMediaSource->ActiveSourceBuffers();
+ if (!sourceBuffers) {
+ // Media source object is shutting down.
+ return TimeIntervals();
+ }
+ TimeUnit highestEndTime;
+ nsTArray<media::TimeIntervals> activeRanges;
+ media::TimeIntervals buffered;
+
+ for (uint32_t i = 0; i < sourceBuffers->Length(); i++) {
+ bool found;
+ dom::SourceBuffer* sb = sourceBuffers->IndexedGetter(i, found);
+ MOZ_ASSERT(found);
+
+ activeRanges.AppendElement(sb->GetTimeIntervals());
+ highestEndTime =
+ std::max(highestEndTime, activeRanges.LastElement().GetEnd());
+ }
+
+ buffered += media::TimeInterval(TimeUnit::Zero(), highestEndTime);
+
+ for (auto& range : activeRanges) {
+ if (mEnded && !range.IsEmpty()) {
+ // Set the end time on the last range to highestEndTime by adding a
+ // new range spanning the current end time to highestEndTime, which
+ // Normalize() will then merge with the old last range.
+ range += media::TimeInterval(range.GetEnd(), highestEndTime);
+ }
+ buffered.Intersection(range);
+ }
+
+ MSE_DEBUG("ranges=%s", DumpTimeRanges(buffered).get());
+ return buffered;
+}
+
+void MediaSourceDecoder::Shutdown() {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_DEBUG("Shutdown");
+ // Detach first so that TrackBuffers are unused on the main thread when
+ // shut down on the decode task queue.
+ if (mMediaSource) {
+ mMediaSource->Detach();
+ }
+ mDemuxer = nullptr;
+
+ MediaDecoder::Shutdown();
+}
+
+void MediaSourceDecoder::AttachMediaSource(dom::MediaSource* aMediaSource) {
+ MOZ_ASSERT(!mMediaSource && !GetStateMachine() && NS_IsMainThread());
+ mMediaSource = aMediaSource;
+ DDLINKCHILD("mediasource", aMediaSource);
+}
+
+void MediaSourceDecoder::DetachMediaSource() {
+ MOZ_ASSERT(mMediaSource && NS_IsMainThread());
+ DDUNLINKCHILD(mMediaSource);
+ mMediaSource = nullptr;
+}
+
+void MediaSourceDecoder::Ended(bool aEnded) {
+ MOZ_ASSERT(NS_IsMainThread());
+ if (aEnded) {
+ // We want the MediaSourceReader to refresh its buffered range as it may
+ // have been modified (end lined up).
+ NotifyDataArrived();
+ }
+ mEnded = aEnded;
+ GetStateMachine()->DispatchIsLiveStream(!mEnded);
+}
+
+void MediaSourceDecoder::AddSizeOfResources(ResourceSizes* aSizes) {
+ MOZ_ASSERT(NS_IsMainThread());
+ if (GetDemuxer()) {
+ GetDemuxer()->AddSizeOfResources(aSizes);
+ }
+}
+
+void MediaSourceDecoder::SetInitialDuration(const TimeUnit& aDuration) {
+ MOZ_ASSERT(NS_IsMainThread());
+ // Only use the decoded duration if one wasn't already
+ // set.
+ if (!mMediaSource || !std::isnan(ExplicitDuration())) {
+ return;
+ }
+ SetMediaSourceDuration(aDuration);
+}
+
+void MediaSourceDecoder::SetMediaSourceDuration(const TimeUnit& aDuration) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_ASSERT(!IsShutdown());
+ if (aDuration.IsPositiveOrZero()) {
+ SetExplicitDuration(ToMicrosecondResolution(aDuration.ToSeconds()));
+ } else {
+ SetExplicitDuration(PositiveInfinity<double>());
+ }
+}
+
+void MediaSourceDecoder::SetMediaSourceDuration(double aDuration) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_ASSERT(!IsShutdown());
+ if (aDuration >= 0) {
+ SetExplicitDuration(aDuration);
+ } else {
+ SetExplicitDuration(PositiveInfinity<double>());
+ }
+}
+
+RefPtr<GenericPromise> MediaSourceDecoder::RequestDebugInfo(
+ dom::MediaSourceDecoderDebugInfo& aInfo) {
+ // This should be safe to call off main thead, but there's no such usage at
+ // time of writing. Can be carefully relaxed if needed.
+ MOZ_ASSERT(NS_IsMainThread(), "Expects to be called on main thread.");
+ nsTArray<RefPtr<GenericPromise>> promises;
+ if (mReader) {
+ promises.AppendElement(mReader->RequestDebugInfo(aInfo.mReader));
+ }
+ if (mDemuxer) {
+ promises.AppendElement(mDemuxer->GetDebugInfo(aInfo.mDemuxer));
+ }
+ return GenericPromise::All(GetCurrentSerialEventTarget(), promises)
+ ->Then(
+ GetCurrentSerialEventTarget(), __func__,
+ []() { return GenericPromise::CreateAndResolve(true, __func__); },
+ [] {
+ return GenericPromise::CreateAndReject(NS_ERROR_FAILURE, __func__);
+ });
+}
+
+double MediaSourceDecoder::GetDuration() {
+ MOZ_ASSERT(NS_IsMainThread());
+ return ExplicitDuration();
+}
+
+MediaDecoderOwner::NextFrameStatus
+MediaSourceDecoder::NextFrameBufferedStatus() {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ if (!mMediaSource ||
+ mMediaSource->ReadyState() == dom::MediaSourceReadyState::Closed) {
+ return MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE;
+ }
+
+ // Next frame hasn't been decoded yet.
+ // Use the buffered range to consider if we have the next frame available.
+ auto currentPosition = CurrentPosition();
+ TimeIntervals buffered = GetBuffered();
+ buffered.SetFuzz(MediaSourceDemuxer::EOS_FUZZ / 2);
+ TimeInterval interval(
+ currentPosition, currentPosition + DEFAULT_NEXT_FRAME_AVAILABLE_BUFFERED);
+ return buffered.ContainsWithStrictEnd(ClampIntervalToEnd(interval))
+ ? MediaDecoderOwner::NEXT_FRAME_AVAILABLE
+ : MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE;
+}
+
+bool MediaSourceDecoder::CanPlayThroughImpl() {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ if (NextFrameBufferedStatus() == MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE) {
+ return false;
+ }
+
+ if (std::isnan(mMediaSource->Duration())) {
+ // Don't have any data yet.
+ return false;
+ }
+ TimeUnit duration = TimeUnit::FromSeconds(mMediaSource->Duration());
+ auto currentPosition = CurrentPosition();
+ if (duration <= currentPosition) {
+ return true;
+ }
+ // If we have data up to the mediasource's duration or 3s ahead, we can
+ // assume that we can play without interruption.
+ dom::SourceBufferList* sourceBuffers = mMediaSource->ActiveSourceBuffers();
+ TimeUnit bufferedEnd = sourceBuffers->GetHighestBufferedEndTime();
+ TimeUnit timeAhead =
+ std::min(duration, currentPosition + TimeUnit::FromSeconds(3));
+ TimeInterval interval(currentPosition, timeAhead);
+ return bufferedEnd >= timeAhead;
+}
+
+TimeInterval MediaSourceDecoder::ClampIntervalToEnd(
+ const TimeInterval& aInterval) {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ if (!mEnded) {
+ return aInterval;
+ }
+ TimeUnit duration = mDuration.match(DurationToTimeUnit());
+ if (duration < aInterval.mStart) {
+ return aInterval;
+ }
+ return TimeInterval(aInterval.mStart, std::min(aInterval.mEnd, duration),
+ aInterval.mFuzz);
+}
+
+void MediaSourceDecoder::NotifyInitDataArrived() {
+ MOZ_ASSERT(NS_IsMainThread());
+ if (mDemuxer) {
+ mDemuxer->NotifyInitDataArrived();
+ }
+}
+
+void MediaSourceDecoder::NotifyDataArrived() {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
+ NotifyReaderDataArrived();
+ GetOwner()->DownloadProgressed();
+}
+
+already_AddRefed<nsIPrincipal> MediaSourceDecoder::GetCurrentPrincipal() {
+ MOZ_ASSERT(NS_IsMainThread());
+ return do_AddRef(mPrincipal);
+}
+
+bool MediaSourceDecoder::HadCrossOriginRedirects() {
+ MOZ_ASSERT(NS_IsMainThread());
+ return false;
+}
+
+#undef MSE_DEBUG
+#undef MSE_DEBUGV
+
+} // namespace mozilla
diff --git a/dom/media/mediasource/MediaSourceDecoder.h b/dom/media/mediasource/MediaSourceDecoder.h
new file mode 100644
index 0000000000..ff312cb6cf
--- /dev/null
+++ b/dom/media/mediasource/MediaSourceDecoder.h
@@ -0,0 +1,101 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_MEDIASOURCEDECODER_H_
+#define MOZILLA_MEDIASOURCEDECODER_H_
+
+#include "MediaDecoder.h"
+#include "mozilla/RefPtr.h"
+#include "mozilla/dom/MediaDebugInfoBinding.h"
+
+namespace mozilla {
+
+class MediaDecoderStateMachineBase;
+class MediaSourceDemuxer;
+
+namespace dom {
+
+class MediaSource;
+
+} // namespace dom
+
+DDLoggedTypeDeclNameAndBase(MediaSourceDecoder, MediaDecoder);
+
+class MediaSourceDecoder : public MediaDecoder,
+ public DecoderDoctorLifeLogger<MediaSourceDecoder> {
+ public:
+ explicit MediaSourceDecoder(MediaDecoderInit& aInit);
+
+ nsresult Load(nsIPrincipal* aPrincipal);
+ media::TimeIntervals GetSeekable() override;
+ media::TimeRanges GetSeekableTimeRanges() override;
+ media::TimeIntervals GetBuffered() override;
+
+ void Shutdown() override;
+
+ void AttachMediaSource(dom::MediaSource* aMediaSource);
+ void DetachMediaSource();
+
+ void Ended(bool aEnded);
+
+ // Return the duration of the video in seconds.
+ double GetDuration() override;
+
+ void SetInitialDuration(const media::TimeUnit& aDuration);
+ void SetMediaSourceDuration(const media::TimeUnit& aDuration);
+ void SetMediaSourceDuration(double aDuration);
+
+ MediaSourceDemuxer* GetDemuxer() { return mDemuxer; }
+
+ already_AddRefed<nsIPrincipal> GetCurrentPrincipal() override;
+
+ bool HadCrossOriginRedirects() override;
+
+ bool IsTransportSeekable() override { return true; }
+
+ // Requests that the MediaSourceDecoder populates aInfo with debug
+ // information. This may be done asynchronously, and aInfo should *not* be
+ // accessed by the caller until the returned promise is resolved or rejected.
+ RefPtr<GenericPromise> RequestDebugInfo(
+ dom::MediaSourceDecoderDebugInfo& aInfo);
+
+ void AddSizeOfResources(ResourceSizes* aSizes) override;
+
+ MediaDecoderOwner::NextFrameStatus NextFrameBufferedStatus() override;
+
+ bool IsMSE() const override { return true; }
+
+ void NotifyInitDataArrived();
+
+ // Called as data appended to the source buffer or EOS is called on the media
+ // source. Main thread only.
+ void NotifyDataArrived();
+
+ private:
+ MediaDecoderStateMachineBase* CreateStateMachine(
+ bool aDisableExternalEngine) override;
+
+ template <typename IntervalType>
+ IntervalType GetSeekableImpl();
+
+ void DoSetMediaSourceDuration(double aDuration);
+ media::TimeInterval ClampIntervalToEnd(const media::TimeInterval& aInterval);
+ bool CanPlayThroughImpl() override;
+
+ RefPtr<nsIPrincipal> mPrincipal;
+
+ // The owning MediaSource holds a strong reference to this decoder, and
+ // calls Attach/DetachMediaSource on this decoder to set and clear
+ // mMediaSource.
+ dom::MediaSource* mMediaSource;
+ RefPtr<MediaSourceDemuxer> mDemuxer;
+
+ bool mEnded;
+};
+
+} // namespace mozilla
+
+#endif /* MOZILLA_MEDIASOURCEDECODER_H_ */
diff --git a/dom/media/mediasource/MediaSourceDemuxer.cpp b/dom/media/mediasource/MediaSourceDemuxer.cpp
new file mode 100644
index 0000000000..9d08eda4fa
--- /dev/null
+++ b/dom/media/mediasource/MediaSourceDemuxer.cpp
@@ -0,0 +1,530 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaSourceDemuxer.h"
+
+#include "MediaSourceUtils.h"
+#include "OpusDecoder.h"
+#include "SourceBufferList.h"
+#include "VorbisDecoder.h"
+#include "VideoUtils.h"
+#include "nsPrintfCString.h"
+
+#include <algorithm>
+#include <limits>
+#include <stdint.h>
+
+namespace mozilla {
+
+typedef TrackInfo::TrackType TrackType;
+using media::TimeIntervals;
+using media::TimeUnit;
+
+MediaSourceDemuxer::MediaSourceDemuxer(AbstractThread* aAbstractMainThread)
+ : mTaskQueue(
+ TaskQueue::Create(GetMediaThreadPool(MediaThreadType::SUPERVISOR),
+ "MediaSourceDemuxer::mTaskQueue")),
+ mMonitor("MediaSourceDemuxer") {
+ MOZ_ASSERT(NS_IsMainThread());
+}
+
+constexpr TimeUnit MediaSourceDemuxer::EOS_FUZZ;
+constexpr TimeUnit MediaSourceDemuxer::EOS_FUZZ_START;
+
+RefPtr<MediaSourceDemuxer::InitPromise> MediaSourceDemuxer::Init() {
+ RefPtr<MediaSourceDemuxer> self = this;
+ return InvokeAsync(GetTaskQueue(), __func__, [self]() {
+ if (self->ScanSourceBuffersForContent()) {
+ return InitPromise::CreateAndResolve(NS_OK, __func__);
+ }
+
+ RefPtr<InitPromise> p = self->mInitPromise.Ensure(__func__);
+
+ return p;
+ });
+}
+
+void MediaSourceDemuxer::AddSizeOfResources(
+ MediaSourceDecoder::ResourceSizes* aSizes) {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ // NB: The track buffers must only be accessed on the TaskQueue.
+ RefPtr<MediaSourceDemuxer> self = this;
+ RefPtr<MediaSourceDecoder::ResourceSizes> sizes = aSizes;
+ nsCOMPtr<nsIRunnable> task = NS_NewRunnableFunction(
+ "MediaSourceDemuxer::AddSizeOfResources", [self, sizes]() {
+ for (const RefPtr<TrackBuffersManager>& manager :
+ self->mSourceBuffers) {
+ manager->AddSizeOfResources(sizes);
+ }
+ });
+
+ nsresult rv = GetTaskQueue()->Dispatch(task.forget());
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+}
+
+void MediaSourceDemuxer::NotifyInitDataArrived() {
+ RefPtr<MediaSourceDemuxer> self = this;
+ nsCOMPtr<nsIRunnable> task = NS_NewRunnableFunction(
+ "MediaSourceDemuxer::NotifyInitDataArrived", [self]() {
+ if (self->mInitPromise.IsEmpty()) {
+ return;
+ }
+ if (self->ScanSourceBuffersForContent()) {
+ self->mInitPromise.ResolveIfExists(NS_OK, __func__);
+ }
+ });
+ nsresult rv = GetTaskQueue()->Dispatch(task.forget());
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+}
+
+bool MediaSourceDemuxer::ScanSourceBuffersForContent() {
+ MOZ_ASSERT(OnTaskQueue());
+
+ if (mSourceBuffers.IsEmpty()) {
+ return false;
+ }
+
+ MonitorAutoLock mon(mMonitor);
+
+ bool haveEmptySourceBuffer = false;
+ for (const auto& sourceBuffer : mSourceBuffers) {
+ MediaInfo info = sourceBuffer->GetMetadata();
+ if (!info.HasAudio() && !info.HasVideo()) {
+ haveEmptySourceBuffer = true;
+ }
+ if (info.HasAudio() && !mAudioTrack) {
+ mInfo.mAudio = info.mAudio;
+ mAudioTrack = sourceBuffer;
+ }
+ if (info.HasVideo() && !mVideoTrack) {
+ mInfo.mVideo = info.mVideo;
+ mVideoTrack = sourceBuffer;
+ }
+ if (info.IsEncrypted() && !mInfo.IsEncrypted()) {
+ mInfo.mCrypto = info.mCrypto;
+ }
+ }
+ if (mInfo.HasAudio() && mInfo.HasVideo()) {
+ // We have both audio and video. We can ignore non-ready source buffer.
+ return true;
+ }
+ return !haveEmptySourceBuffer;
+}
+
+uint32_t MediaSourceDemuxer::GetNumberTracks(TrackType aType) const {
+ MonitorAutoLock mon(mMonitor);
+
+ switch (aType) {
+ case TrackType::kAudioTrack:
+ return mInfo.HasAudio() ? 1u : 0;
+ case TrackType::kVideoTrack:
+ return mInfo.HasVideo() ? 1u : 0;
+ default:
+ return 0;
+ }
+}
+
+already_AddRefed<MediaTrackDemuxer> MediaSourceDemuxer::GetTrackDemuxer(
+ TrackType aType, uint32_t aTrackNumber) {
+ MonitorAutoLock mon(mMonitor);
+ RefPtr<TrackBuffersManager> manager = GetManager(aType);
+ if (!manager) {
+ return nullptr;
+ }
+ RefPtr<MediaSourceTrackDemuxer> e =
+ new MediaSourceTrackDemuxer(this, aType, manager);
+ DDLINKCHILD("track demuxer", e.get());
+ mDemuxers.AppendElement(e);
+ return e.forget();
+}
+
+bool MediaSourceDemuxer::IsSeekable() const { return true; }
+
+UniquePtr<EncryptionInfo> MediaSourceDemuxer::GetCrypto() {
+ MonitorAutoLock mon(mMonitor);
+ auto crypto = MakeUnique<EncryptionInfo>();
+ *crypto = mInfo.mCrypto;
+ return crypto;
+}
+
+void MediaSourceDemuxer::AttachSourceBuffer(
+ RefPtr<TrackBuffersManager>& aSourceBuffer) {
+ nsCOMPtr<nsIRunnable> task = NewRunnableMethod<RefPtr<TrackBuffersManager>&&>(
+ "MediaSourceDemuxer::DoAttachSourceBuffer", this,
+ &MediaSourceDemuxer::DoAttachSourceBuffer, aSourceBuffer);
+ nsresult rv = GetTaskQueue()->Dispatch(task.forget());
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+}
+
+void MediaSourceDemuxer::DoAttachSourceBuffer(
+ RefPtr<mozilla::TrackBuffersManager>&& aSourceBuffer) {
+ MOZ_ASSERT(OnTaskQueue());
+ mSourceBuffers.AppendElement(std::move(aSourceBuffer));
+ ScanSourceBuffersForContent();
+}
+
+void MediaSourceDemuxer::DetachSourceBuffer(
+ RefPtr<TrackBuffersManager>& aSourceBuffer) {
+ nsCOMPtr<nsIRunnable> task =
+ NS_NewRunnableFunction("MediaSourceDemuxer::DoDetachSourceBuffer",
+ [self = RefPtr{this}, aSourceBuffer]() {
+ self->DoDetachSourceBuffer(aSourceBuffer);
+ });
+ nsresult rv = GetTaskQueue()->Dispatch(task.forget());
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+}
+
+void MediaSourceDemuxer::DoDetachSourceBuffer(
+ const RefPtr<TrackBuffersManager>& aSourceBuffer) {
+ MOZ_ASSERT(OnTaskQueue());
+ mSourceBuffers.RemoveElementsBy(
+ [&aSourceBuffer](const RefPtr<TrackBuffersManager> aLinkedSourceBuffer) {
+ return aLinkedSourceBuffer == aSourceBuffer;
+ });
+
+ AutoTArray<RefPtr<MediaSourceTrackDemuxer>, 2> matchingDemuxers;
+ {
+ MonitorAutoLock mon(mMonitor);
+ if (aSourceBuffer == mAudioTrack) {
+ mAudioTrack = nullptr;
+ }
+ if (aSourceBuffer == mVideoTrack) {
+ mVideoTrack = nullptr;
+ }
+
+ mDemuxers.RemoveElementsBy(
+ [&](RefPtr<MediaSourceTrackDemuxer>& elementRef) {
+ if (!elementRef->HasManager(aSourceBuffer)) {
+ return false;
+ }
+ matchingDemuxers.AppendElement(std::move(elementRef));
+ return true;
+ });
+ }
+
+ for (MediaSourceTrackDemuxer* demuxer : matchingDemuxers) {
+ demuxer->DetachManager();
+ }
+ ScanSourceBuffersForContent();
+}
+
+TrackInfo* MediaSourceDemuxer::GetTrackInfo(TrackType aTrack) {
+ switch (aTrack) {
+ case TrackType::kAudioTrack:
+ return &mInfo.mAudio;
+ case TrackType::kVideoTrack:
+ return &mInfo.mVideo;
+ default:
+ return nullptr;
+ }
+}
+
+RefPtr<TrackBuffersManager> MediaSourceDemuxer::GetManager(TrackType aTrack) {
+ switch (aTrack) {
+ case TrackType::kAudioTrack:
+ return mAudioTrack;
+ case TrackType::kVideoTrack:
+ return mVideoTrack;
+ default:
+ return nullptr;
+ }
+}
+
+MediaSourceDemuxer::~MediaSourceDemuxer() {
+ mInitPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
+}
+
+RefPtr<GenericPromise> MediaSourceDemuxer::GetDebugInfo(
+ dom::MediaSourceDemuxerDebugInfo& aInfo) const {
+ MonitorAutoLock mon(mMonitor);
+ nsTArray<RefPtr<GenericPromise>> promises;
+ if (mAudioTrack) {
+ promises.AppendElement(mAudioTrack->RequestDebugInfo(aInfo.mAudioTrack));
+ }
+ if (mVideoTrack) {
+ promises.AppendElement(mVideoTrack->RequestDebugInfo(aInfo.mVideoTrack));
+ }
+ return GenericPromise::All(GetCurrentSerialEventTarget(), promises)
+ ->Then(
+ GetCurrentSerialEventTarget(), __func__,
+ []() { return GenericPromise::CreateAndResolve(true, __func__); },
+ [] {
+ return GenericPromise::CreateAndReject(NS_ERROR_FAILURE, __func__);
+ });
+}
+
+MediaSourceTrackDemuxer::MediaSourceTrackDemuxer(MediaSourceDemuxer* aParent,
+ TrackInfo::TrackType aType,
+ TrackBuffersManager* aManager)
+ : mParent(aParent),
+ mTaskQueue(mParent->GetTaskQueue()),
+ mType(aType),
+ mMonitor("MediaSourceTrackDemuxer"),
+ mManager(aManager),
+ mReset(true),
+ mPreRoll(TimeUnit::FromMicroseconds(
+ OpusDataDecoder::IsOpus(mParent->GetTrackInfo(mType)->mMimeType) ||
+ VorbisDataDecoder::IsVorbis(
+ mParent->GetTrackInfo(mType)->mMimeType)
+ ? 80000
+ : mParent->GetTrackInfo(mType)->mMimeType.EqualsLiteral(
+ "audio/mp4a-latm")
+ // AAC encoder delay is by default 2112 audio frames.
+ // See
+ // https://developer.apple.com/library/content/documentation/QuickTime/QTFF/QTFFAppenG/QTFFAppenG.html
+ // So we always seek 2112 frames
+ ? (2112 * 1000000ULL /
+ mParent->GetTrackInfo(mType)->GetAsAudioInfo()->mRate)
+ : 0)) {
+ MOZ_ASSERT(mParent);
+ MOZ_ASSERT(mTaskQueue);
+}
+
+UniquePtr<TrackInfo> MediaSourceTrackDemuxer::GetInfo() const {
+ MonitorAutoLock mon(mParent->mMonitor);
+ return mParent->GetTrackInfo(mType)->Clone();
+}
+
+RefPtr<MediaSourceTrackDemuxer::SeekPromise> MediaSourceTrackDemuxer::Seek(
+ const TimeUnit& aTime) {
+ MOZ_ASSERT(mParent, "Called after BreackCycle()");
+ return InvokeAsync(mParent->GetTaskQueue(), this, __func__,
+ &MediaSourceTrackDemuxer::DoSeek, aTime);
+}
+
+RefPtr<MediaSourceTrackDemuxer::SamplesPromise>
+MediaSourceTrackDemuxer::GetSamples(int32_t aNumSamples) {
+ MOZ_ASSERT(mParent, "Called after BreackCycle()");
+ return InvokeAsync(mParent->GetTaskQueue(), this, __func__,
+ &MediaSourceTrackDemuxer::DoGetSamples, aNumSamples);
+}
+
+void MediaSourceTrackDemuxer::Reset() {
+ MOZ_ASSERT(mParent, "Called after BreackCycle()");
+ RefPtr<MediaSourceTrackDemuxer> self = this;
+ nsCOMPtr<nsIRunnable> task =
+ NS_NewRunnableFunction("MediaSourceTrackDemuxer::Reset", [self]() {
+ self->mNextSample.reset();
+ self->mReset = true;
+ if (!self->mManager) {
+ return;
+ }
+ MOZ_ASSERT(self->OnTaskQueue());
+ self->mManager->Seek(self->mType, TimeUnit::Zero(), TimeUnit::Zero());
+ {
+ MonitorAutoLock mon(self->mMonitor);
+ self->mNextRandomAccessPoint =
+ self->mManager->GetNextRandomAccessPoint(
+ self->mType, MediaSourceDemuxer::EOS_FUZZ);
+ }
+ });
+ nsresult rv = mParent->GetTaskQueue()->Dispatch(task.forget());
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+}
+
+nsresult MediaSourceTrackDemuxer::GetNextRandomAccessPoint(TimeUnit* aTime) {
+ MonitorAutoLock mon(mMonitor);
+ *aTime = mNextRandomAccessPoint;
+ return NS_OK;
+}
+
+RefPtr<MediaSourceTrackDemuxer::SkipAccessPointPromise>
+MediaSourceTrackDemuxer::SkipToNextRandomAccessPoint(
+ const TimeUnit& aTimeThreshold) {
+ return InvokeAsync(mParent->GetTaskQueue(), this, __func__,
+ &MediaSourceTrackDemuxer::DoSkipToNextRandomAccessPoint,
+ aTimeThreshold);
+}
+
+media::TimeIntervals MediaSourceTrackDemuxer::GetBuffered() {
+ MonitorAutoLock mon(mMonitor);
+ if (!mManager) {
+ return media::TimeIntervals();
+ }
+ return mManager->Buffered();
+}
+
+void MediaSourceTrackDemuxer::BreakCycles() {
+ RefPtr<MediaSourceTrackDemuxer> self = this;
+ nsCOMPtr<nsIRunnable> task =
+ NS_NewRunnableFunction("MediaSourceTrackDemuxer::BreakCycles", [self]() {
+ self->DetachManager();
+ self->mParent = nullptr;
+ });
+ nsresult rv = mParent->GetTaskQueue()->Dispatch(task.forget());
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+}
+
+RefPtr<MediaSourceTrackDemuxer::SeekPromise> MediaSourceTrackDemuxer::DoSeek(
+ const TimeUnit& aTime) {
+ if (!mManager) {
+ return SeekPromise::CreateAndReject(
+ MediaResult(NS_ERROR_DOM_MEDIA_CANCELED,
+ RESULT_DETAIL("manager is detached.")),
+ __func__);
+ }
+
+ MOZ_ASSERT(OnTaskQueue());
+ TimeIntervals buffered = mManager->Buffered(mType);
+ // Fuzz factor represents a +/- threshold. So when seeking it allows the gap
+ // to be twice as big as the fuzz value. We only want to allow EOS_FUZZ gap.
+ buffered.SetFuzz(MediaSourceDemuxer::EOS_FUZZ / 2);
+ TimeUnit seekTime = std::max(aTime - mPreRoll, TimeUnit::Zero());
+
+ if (mManager->IsEnded() && seekTime >= buffered.GetEnd()) {
+ // We're attempting to seek past the end time. Cap seekTime so that we seek
+ // to the last sample instead.
+ seekTime = std::max(mManager->HighestStartTime(mType) - mPreRoll,
+ TimeUnit::Zero());
+ }
+ if (!buffered.ContainsWithStrictEnd(seekTime)) {
+ if (!buffered.ContainsWithStrictEnd(aTime)) {
+ // We don't have the data to seek to.
+ return SeekPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA,
+ __func__);
+ }
+ // Theoretically we should reject the promise with WAITING_FOR_DATA,
+ // however, to avoid unwanted regressions we assume that if at this time
+ // we don't have the wanted data it won't come later.
+ // Instead of using the pre-rolled time, use the earliest time available in
+ // the interval.
+ TimeIntervals::IndexType index = buffered.Find(aTime);
+ MOZ_ASSERT(index != TimeIntervals::NoIndex);
+ seekTime = buffered[index].mStart;
+ }
+ seekTime = mManager->Seek(mType, seekTime, MediaSourceDemuxer::EOS_FUZZ);
+ MediaResult result = NS_OK;
+ RefPtr<MediaRawData> sample =
+ mManager->GetSample(mType, TimeUnit::Zero(), result);
+ MOZ_ASSERT(NS_SUCCEEDED(result) && sample);
+ mNextSample = Some(sample);
+ mReset = false;
+ {
+ MonitorAutoLock mon(mMonitor);
+ mNextRandomAccessPoint =
+ mManager->GetNextRandomAccessPoint(mType, MediaSourceDemuxer::EOS_FUZZ);
+ }
+ return SeekPromise::CreateAndResolve(seekTime, __func__);
+}
+
+RefPtr<MediaSourceTrackDemuxer::SamplesPromise>
+MediaSourceTrackDemuxer::DoGetSamples(int32_t aNumSamples) {
+ if (!mManager) {
+ return SamplesPromise::CreateAndReject(
+ MediaResult(NS_ERROR_DOM_MEDIA_CANCELED,
+ RESULT_DETAIL("manager is detached.")),
+ __func__);
+ }
+
+ MOZ_ASSERT(OnTaskQueue());
+ if (mReset) {
+ // If a reset was recently performed, we ensure that the data
+ // we are about to retrieve is still available.
+ TimeIntervals buffered = mManager->Buffered(mType);
+ if (buffered.IsEmpty() && mManager->IsEnded()) {
+ return SamplesPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_END_OF_STREAM,
+ __func__);
+ }
+
+ // We use a larger fuzz to determine the presentation start
+ // time than the fuzz we use to determine acceptable gaps between
+ // frames. This is needed to fix embedded video issues as seen in the wild
+ // from different muxed stream start times.
+ // See: https://www.w3.org/TR/media-source-2/#presentation-start-time
+ buffered.SetFuzz(MediaSourceDemuxer::EOS_FUZZ_START);
+ if (!buffered.ContainsWithStrictEnd(TimeUnit::Zero())) {
+ return SamplesPromise::CreateAndReject(
+ NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA, __func__);
+ }
+ mReset = false;
+ }
+ RefPtr<MediaRawData> sample;
+ MediaResult result = NS_OK;
+ if (mNextSample) {
+ sample = mNextSample.ref();
+ mNextSample.reset();
+ } else {
+ sample = mManager->GetSample(mType, MediaSourceDemuxer::EOS_FUZZ, result);
+ }
+ if (!sample) {
+ if (result == NS_ERROR_DOM_MEDIA_END_OF_STREAM ||
+ result == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA) {
+ return SamplesPromise::CreateAndReject(
+ (result == NS_ERROR_DOM_MEDIA_END_OF_STREAM && mManager->IsEnded())
+ ? NS_ERROR_DOM_MEDIA_END_OF_STREAM
+ : NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA,
+ __func__);
+ }
+ return SamplesPromise::CreateAndReject(result, __func__);
+ }
+ RefPtr<SamplesHolder> samples = new SamplesHolder;
+ samples->AppendSample(sample);
+ {
+ MonitorAutoLock mon(mMonitor); // spurious warning will be given
+ // Diagnostic asserts for bug 1810396
+ MOZ_DIAGNOSTIC_ASSERT(sample, "Invalid sample pointer found!");
+ MOZ_DIAGNOSTIC_ASSERT(sample->HasValidTime(), "Invalid sample time found!");
+ if (!sample) {
+ return SamplesPromise::CreateAndReject(NS_ERROR_NULL_POINTER, __func__);
+ }
+ if (mNextRandomAccessPoint <= sample->mTime) {
+ mNextRandomAccessPoint = mManager->GetNextRandomAccessPoint(
+ mType, MediaSourceDemuxer::EOS_FUZZ);
+ }
+ }
+ return SamplesPromise::CreateAndResolve(samples, __func__);
+}
+
+RefPtr<MediaSourceTrackDemuxer::SkipAccessPointPromise>
+MediaSourceTrackDemuxer::DoSkipToNextRandomAccessPoint(
+ const TimeUnit& aTimeThreadshold) {
+ if (!mManager) {
+ return SkipAccessPointPromise::CreateAndReject(
+ SkipFailureHolder(MediaResult(NS_ERROR_DOM_MEDIA_CANCELED,
+ RESULT_DETAIL("manager is detached.")),
+ 0),
+ __func__);
+ }
+
+ MOZ_ASSERT(OnTaskQueue());
+ uint32_t parsed = 0;
+ // Ensure that the data we are about to skip to is still available.
+ TimeIntervals buffered = mManager->Buffered(mType);
+ buffered.SetFuzz(MediaSourceDemuxer::EOS_FUZZ / 2);
+ if (buffered.ContainsWithStrictEnd(aTimeThreadshold)) {
+ bool found;
+ parsed = mManager->SkipToNextRandomAccessPoint(
+ mType, aTimeThreadshold, MediaSourceDemuxer::EOS_FUZZ, found);
+ if (found) {
+ return SkipAccessPointPromise::CreateAndResolve(parsed, __func__);
+ }
+ }
+ SkipFailureHolder holder(mManager->IsEnded()
+ ? NS_ERROR_DOM_MEDIA_END_OF_STREAM
+ : NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA,
+ parsed);
+ return SkipAccessPointPromise::CreateAndReject(holder, __func__);
+}
+
+bool MediaSourceTrackDemuxer::HasManager(TrackBuffersManager* aManager) const {
+ MOZ_ASSERT(OnTaskQueue());
+ return mManager == aManager;
+}
+
+void MediaSourceTrackDemuxer::DetachManager() {
+ MOZ_ASSERT(OnTaskQueue());
+ MonitorAutoLock mon(mMonitor);
+ mManager = nullptr;
+}
+
+} // namespace mozilla
diff --git a/dom/media/mediasource/MediaSourceDemuxer.h b/dom/media/mediasource/MediaSourceDemuxer.h
new file mode 100644
index 0000000000..215b0210e2
--- /dev/null
+++ b/dom/media/mediasource/MediaSourceDemuxer.h
@@ -0,0 +1,172 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(MediaSourceDemuxer_h_)
+# define MediaSourceDemuxer_h_
+
+# include "MediaDataDemuxer.h"
+# include "MediaResource.h"
+# include "MediaSource.h"
+# include "TrackBuffersManager.h"
+# include "mozilla/Atomics.h"
+# include "mozilla/Maybe.h"
+# include "mozilla/Monitor.h"
+# include "mozilla/TaskQueue.h"
+# include "mozilla/dom/MediaDebugInfoBinding.h"
+
+namespace mozilla {
+
+class AbstractThread;
+class MediaResult;
+class MediaSourceTrackDemuxer;
+
+DDLoggedTypeDeclNameAndBase(MediaSourceDemuxer, MediaDataDemuxer);
+DDLoggedTypeNameAndBase(MediaSourceTrackDemuxer, MediaTrackDemuxer);
+
+class MediaSourceDemuxer : public MediaDataDemuxer,
+ public DecoderDoctorLifeLogger<MediaSourceDemuxer> {
+ public:
+ explicit MediaSourceDemuxer(AbstractThread* aAbstractMainThread);
+
+ RefPtr<InitPromise> Init() override;
+
+ uint32_t GetNumberTracks(TrackInfo::TrackType aType) const override;
+
+ already_AddRefed<MediaTrackDemuxer> GetTrackDemuxer(
+ TrackInfo::TrackType aType, uint32_t aTrackNumber) override;
+
+ bool IsSeekable() const override;
+
+ UniquePtr<EncryptionInfo> GetCrypto() override;
+
+ bool ShouldComputeStartTime() const override { return false; }
+
+ /* interface for TrackBuffersManager */
+ void AttachSourceBuffer(RefPtr<TrackBuffersManager>& aSourceBuffer);
+ void DetachSourceBuffer(RefPtr<TrackBuffersManager>& aSourceBuffer);
+ TaskQueue* GetTaskQueue() { return mTaskQueue; }
+ void NotifyInitDataArrived();
+
+ // Populates aInfo with info describing the state of the MediaSource internal
+ // buffered data. Used for debugging purposes.
+ // aInfo should *not* be accessed until the returned promise has been resolved
+ // or rejected.
+ RefPtr<GenericPromise> GetDebugInfo(
+ dom::MediaSourceDemuxerDebugInfo& aInfo) const;
+
+ void AddSizeOfResources(MediaSourceDecoder::ResourceSizes* aSizes);
+
+ // Gap allowed between frames.
+ // Due to inaccuracies in determining buffer end
+ // frames (Bug 1065207). This value is based on videos seen in the wild.
+ static constexpr media::TimeUnit EOS_FUZZ =
+ media::TimeUnit::FromMicroseconds(500000);
+
+ // Largest gap allowed between muxed streams with different
+ // start times. The specs suggest up to a "reasonably short" gap of
+ // one second. We conservatively choose to allow a gap up to a bit over
+ // a half-second here, which is still twice our previous effective value
+ // and should resolve embedded playback issues on Twitter, DokiDoki, etc.
+ // See: https://www.w3.org/TR/media-source-2/#presentation-start-time
+ static constexpr media::TimeUnit EOS_FUZZ_START =
+ media::TimeUnit::FromMicroseconds(550000);
+
+ private:
+ ~MediaSourceDemuxer();
+ friend class MediaSourceTrackDemuxer;
+ // Scan source buffers and update information.
+ bool ScanSourceBuffersForContent();
+ RefPtr<TrackBuffersManager> GetManager(TrackInfo::TrackType aType)
+ MOZ_REQUIRES(mMonitor);
+ TrackInfo* GetTrackInfo(TrackInfo::TrackType) MOZ_REQUIRES(mMonitor);
+ void DoAttachSourceBuffer(RefPtr<TrackBuffersManager>&& aSourceBuffer);
+ void DoDetachSourceBuffer(const RefPtr<TrackBuffersManager>& aSourceBuffer);
+ bool OnTaskQueue() {
+ return !GetTaskQueue() || GetTaskQueue()->IsCurrentThreadIn();
+ }
+
+ RefPtr<TaskQueue> mTaskQueue;
+ // Accessed on mTaskQueue or from destructor
+ nsTArray<RefPtr<TrackBuffersManager>> mSourceBuffers;
+ MozPromiseHolder<InitPromise> mInitPromise;
+
+ // Monitor to protect members below across multiple threads.
+ mutable Monitor mMonitor;
+ nsTArray<RefPtr<MediaSourceTrackDemuxer>> mDemuxers MOZ_GUARDED_BY(mMonitor);
+ RefPtr<TrackBuffersManager> mAudioTrack MOZ_GUARDED_BY(mMonitor);
+ RefPtr<TrackBuffersManager> mVideoTrack MOZ_GUARDED_BY(mMonitor);
+ MediaInfo mInfo MOZ_GUARDED_BY(mMonitor);
+};
+
+class MediaSourceTrackDemuxer
+ : public MediaTrackDemuxer,
+ public DecoderDoctorLifeLogger<MediaSourceTrackDemuxer> {
+ public:
+ MediaSourceTrackDemuxer(MediaSourceDemuxer* aParent,
+ TrackInfo::TrackType aType,
+ TrackBuffersManager* aManager)
+ MOZ_REQUIRES(aParent->mMonitor);
+
+ UniquePtr<TrackInfo> GetInfo() const override;
+
+ RefPtr<SeekPromise> Seek(const media::TimeUnit& aTime) override;
+
+ RefPtr<SamplesPromise> GetSamples(int32_t aNumSamples = 1) override;
+
+ void Reset() override;
+
+ nsresult GetNextRandomAccessPoint(media::TimeUnit* aTime) override;
+
+ RefPtr<SkipAccessPointPromise> SkipToNextRandomAccessPoint(
+ const media::TimeUnit& aTimeThreshold) override;
+
+ media::TimeIntervals GetBuffered() override;
+
+ void BreakCycles() override;
+
+ bool GetSamplesMayBlock() const override { return false; }
+
+ bool HasManager(TrackBuffersManager* aManager) const;
+ void DetachManager();
+
+ private:
+ bool OnTaskQueue() const { return mTaskQueue->IsCurrentThreadIn(); }
+
+ RefPtr<SeekPromise> DoSeek(const media::TimeUnit& aTime);
+ RefPtr<SamplesPromise> DoGetSamples(int32_t aNumSamples);
+ RefPtr<SkipAccessPointPromise> DoSkipToNextRandomAccessPoint(
+ const media::TimeUnit& aTimeThreadshold);
+ already_AddRefed<MediaRawData> GetSample(MediaResult& aError);
+ // Return the timestamp of the next keyframe after mLastSampleIndex.
+ media::TimeUnit GetNextRandomAccessPoint();
+
+ RefPtr<MediaSourceDemuxer> mParent;
+ const RefPtr<TaskQueue> mTaskQueue;
+
+ TrackInfo::TrackType mType;
+ // Monitor protecting members below accessed from multiple threads.
+ Monitor mMonitor MOZ_UNANNOTATED;
+ media::TimeUnit mNextRandomAccessPoint;
+ // Would be accessed in MFR's demuxer proxy task queue and TaskQueue, and
+ // only be set on the TaskQueue. It can be accessed while on TaskQueue without
+ // the need for the lock.
+ RefPtr<TrackBuffersManager> mManager;
+
+ // Only accessed on TaskQueue
+ Maybe<RefPtr<MediaRawData>> mNextSample;
+ // Set to true following a reset. Ensure that the next sample demuxed
+ // is available at position 0.
+ // Only accessed on TaskQueue
+ bool mReset;
+
+ // Amount of pre-roll time when seeking.
+ // Set to 80ms if track is Opus.
+ const media::TimeUnit mPreRoll;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/mediasource/MediaSourceUtils.cpp b/dom/media/mediasource/MediaSourceUtils.cpp
new file mode 100644
index 0000000000..37e94d7dc7
--- /dev/null
+++ b/dom/media/mediasource/MediaSourceUtils.cpp
@@ -0,0 +1,49 @@
+/* -*- mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+#include "MediaSourceUtils.h"
+
+#include "mozilla/Logging.h"
+#include "nsPrintfCString.h"
+
+namespace mozilla {
+
+nsCString DumpTimeRanges(const media::TimeIntervals& aRanges) {
+ nsCString dump;
+
+ dump = "[";
+
+ for (uint32_t i = 0; i < aRanges.Length(); ++i) {
+ if (i > 0) {
+ dump += ", ";
+ }
+ dump += nsPrintfCString("(%f, %f)", aRanges.Start(i).ToSeconds(),
+ aRanges.End(i).ToSeconds());
+ }
+
+ dump += "]";
+
+ return dump;
+}
+
+nsCString DumpTimeRangesRaw(const media::TimeIntervals& aRanges) {
+ nsCString dump;
+
+ dump = "[";
+
+ for (uint32_t i = 0; i < aRanges.Length(); ++i) {
+ if (i > 0) {
+ dump += ", ";
+ }
+ dump += nsPrintfCString("(%s, %s)", aRanges.Start(i).ToString().get(),
+ aRanges.End(i).ToString().get());
+ }
+
+ dump += "]";
+
+ return dump;
+}
+
+} // namespace mozilla
diff --git a/dom/media/mediasource/MediaSourceUtils.h b/dom/media/mediasource/MediaSourceUtils.h
new file mode 100644
index 0000000000..18c2d387df
--- /dev/null
+++ b/dom/media/mediasource/MediaSourceUtils.h
@@ -0,0 +1,20 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_MEDIASOURCEUTILS_H_
+#define MOZILLA_MEDIASOURCEUTILS_H_
+
+#include "nsString.h"
+#include "TimeUnits.h"
+
+namespace mozilla {
+
+nsCString DumpTimeRanges(const media::TimeIntervals& aRanges);
+nsCString DumpTimeRangesRaw(const media::TimeIntervals& aRanges);
+
+} // namespace mozilla
+
+#endif /* MOZILLA_MEDIASOURCEUTILS_H_ */
diff --git a/dom/media/mediasource/ResourceQueue.cpp b/dom/media/mediasource/ResourceQueue.cpp
new file mode 100644
index 0000000000..717638b5c8
--- /dev/null
+++ b/dom/media/mediasource/ResourceQueue.cpp
@@ -0,0 +1,204 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "ResourceQueue.h"
+#include "MediaData.h"
+#include "mozilla/ErrorResult.h"
+#include "mozilla/IntegerPrintfMacros.h"
+#include "mozilla/Logging.h"
+#include "mozilla/Sprintf.h"
+#include "mozilla/Unused.h"
+
+extern mozilla::LogModule* GetSourceBufferResourceLog();
+
+#define SBR_DEBUG(arg, ...) \
+ MOZ_LOG(GetSourceBufferResourceLog(), mozilla::LogLevel::Debug, \
+ ("ResourceQueue(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+#define SBR_DEBUGV(arg, ...) \
+ MOZ_LOG(GetSourceBufferResourceLog(), mozilla::LogLevel::Verbose, \
+ ("ResourceQueue(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+
+namespace mozilla {
+
// A ResourceItem holds one appended chunk of data (aData) together with the
// logical byte offset (aOffset) at which that chunk starts in the resource.
ResourceItem::ResourceItem(const MediaSpan& aData, uint64_t aOffset)
    : mData(aData), mOffset(aOffset) {}
+
// Only counts the ResourceItem object itself. The underlying MediaByteBuffer
// memory is accounted for (de-duplicated) by
// ResourceQueue::SizeOfExcludingThis, since multiple MediaSpans may share one
// buffer.
size_t ResourceItem::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  return aMallocSizeOf(this);
}
+
// Deleter handed to nsDeque so that any ResourceItems still in the deque are
// destroyed when the queue itself is destroyed or erased.
class ResourceQueueDeallocator : public nsDequeFunctor<ResourceItem> {
  void operator()(ResourceItem* aObject) override { delete aObject; }
};
+
// Starts empty: zero logical length and a zero logical start offset.
ResourceQueue::ResourceQueue()
    : nsDeque<ResourceItem>(new ResourceQueueDeallocator()),
      mLogicalLength(0),
      mOffset(0) {}
+
// Logical byte offset of the first byte still held in the queue (bytes
// before this have been evicted).
uint64_t ResourceQueue::GetOffset() { return mOffset; }
+
// Logical end of the resource: evicted-bytes offset plus all retained data.
uint64_t ResourceQueue::GetLength() { return mLogicalLength; }
+
+const uint8_t* ResourceQueue::GetContiguousAccess(int64_t aOffset,
+ size_t aSize) {
+ uint32_t offset = 0;
+ uint32_t start = GetAtOffset(aOffset, &offset);
+ if (start >= GetSize()) {
+ return nullptr;
+ }
+ ResourceItem* item = ResourceAt(start);
+ if (offset + aSize > item->mData.Length()) {
+ return nullptr;
+ }
+ return item->mData.Elements() + offset;
+}
+
// Copies aCount bytes starting at logical offset aOffset into aDest, walking
// items from the one containing aOffset. The caller is expected to request a
// range that is resident in the queue.
void ResourceQueue::CopyData(uint64_t aOffset, uint32_t aCount, char* aDest) {
  uint32_t offset = 0;
  uint32_t start = GetAtOffset(aOffset, &offset);
  size_t i = start;
  while (i < uint32_t(GetSize()) && aCount > 0) {
    ResourceItem* item = ResourceAt(i++);
    uint32_t bytes = std::min(aCount, uint32_t(item->mData.Length() - offset));
    if (bytes != 0) {
      memcpy(aDest, item->mData.Elements() + offset, bytes);
      // Only the first item is entered mid-way; subsequent items are copied
      // from their beginning.
      offset = 0;
      aCount -= bytes;
      aDest += bytes;
    }
  }
}
+
+void ResourceQueue::AppendItem(const MediaSpan& aData) {
+ uint64_t offset = mLogicalLength;
+ mLogicalLength += aData.Length();
+ Push(new ResourceItem(aData, offset));
+}
+
// Evicts up to aSizeToEvict bytes from the head, but never past aOffset.
// Returns the number of bytes actually evicted.
uint32_t ResourceQueue::Evict(uint64_t aOffset, uint32_t aSizeToEvict) {
  SBR_DEBUG("Evict(aOffset=%" PRIu64 ", aSizeToEvict=%u)", aOffset,
            aSizeToEvict);
  // Whichever limit is reached first wins: the caller's offset cap or the
  // requested byte count measured from the current head (mOffset).
  return EvictBefore(std::min(aOffset, mOffset + (uint64_t)aSizeToEvict));
}
+
// Evicts all data strictly before logical offset aOffset. Whole items are
// deleted; an item straddling aOffset is trimmed in place. Returns the number
// of bytes evicted.
uint32_t ResourceQueue::EvictBefore(uint64_t aOffset) {
  SBR_DEBUG("EvictBefore(%" PRIu64 ")", aOffset);
  uint32_t evicted = 0;
  while (GetSize()) {
    ResourceItem* item = ResourceAt(0);
    SBR_DEBUG("item=%p length=%zu offset=%" PRIu64, item, item->mData.Length(),
              mOffset);
    if (item->mData.Length() + mOffset >= aOffset) {
      // The eviction point falls inside (or at the start of) this item.
      if (aOffset <= mOffset) {
        // Nothing left to evict.
        break;
      }
      // Trim the front of this item up to aOffset and stop.
      uint32_t offset = aOffset - mOffset;
      mOffset += offset;
      evicted += offset;
      item->mData.RemoveFront(offset);
      item->mOffset += offset;
      break;
    }
    // The entire item lies before aOffset; drop it.
    mOffset += item->mData.Length();
    evicted += item->mData.Length();
    delete PopFront();
  }
  return evicted;
}
+
+uint32_t ResourceQueue::EvictAll() {
+ SBR_DEBUG("EvictAll()");
+ uint32_t evicted = 0;
+ while (GetSize()) {
+ ResourceItem* item = ResourceAt(0);
+ SBR_DEBUG("item=%p length=%zu offset=%" PRIu64, item, item->mData.Length(),
+ mOffset);
+ mOffset += item->mData.Length();
+ evicted += item->mData.Length();
+ delete PopFront();
+ }
+ return evicted;
+}
+
// Memory reporter hook: sums the deque storage, the ResourceItem objects, and
// each distinct underlying MediaByteBuffer exactly once.
size_t ResourceQueue::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
  // Calculate the size of the internal deque.
  size_t size = nsDeque<ResourceItem>::SizeOfExcludingThis(aMallocSizeOf);

  // Sum the ResourceItems. The ResourceItems's MediaSpans may share the
  // same underlying MediaByteBuffers, so we need to de-dupe the buffers
  // in order to report an accurate size.
  // NOTE(review): the Contains() scan makes de-duping O(n^2); presumably fine
  // for the small item counts expected here.
  nsTArray<MediaByteBuffer*> buffers;
  for (uint32_t i = 0; i < uint32_t(GetSize()); ++i) {
    const ResourceItem* item = ResourceAt(i);
    size += item->SizeOfIncludingThis(aMallocSizeOf);
    if (!buffers.Contains(item->mData.Buffer())) {
      buffers.AppendElement(item->mData.Buffer());
    }
  }

  for (MediaByteBuffer* buffer : buffers) {
    size += buffer->ShallowSizeOfExcludingThis(aMallocSizeOf);
  }

  return size;
}
+
+#if defined(DEBUG)
// Debug helper: writes each item's bytes to "<aPath>/<index>.bin". Stops at
// the first file that fails to open.
void ResourceQueue::Dump(const char* aPath) {
  for (uint32_t i = 0; i < uint32_t(GetSize()); ++i) {
    ResourceItem* item = ResourceAt(i);

    char buf[255];
    // SprintfLiteral truncates rather than overflowing if aPath is long.
    SprintfLiteral(buf, "%s/%08u.bin", aPath, i);
    FILE* fp = fopen(buf, "wb");
    if (!fp) {
      return;
    }
    Unused << fwrite(item->mData.Elements(), item->mData.Length(), 1, fp);
    fclose(fp);
  }
}
+#endif
+
+ResourceItem* ResourceQueue::ResourceAt(uint32_t aIndex) const {
+ return static_cast<ResourceItem*>(ObjectAt(aIndex));
+}
+
// Binary-searches for the item whose [mOffset, mOffset + length) range
// contains aOffset (items are stored in increasing offset order). On success,
// returns the item's index and, if aResourceOffset is non-null, the offset of
// aOffset within that item. Returns GetSize() when no item contains aOffset.
uint32_t ResourceQueue::GetAtOffset(uint64_t aOffset,
                                    uint32_t* aResourceOffset) const {
  // Offsets before the evicted head are a caller error.
  MOZ_RELEASE_ASSERT(aOffset >= mOffset);

  size_t hi = GetSize();
  size_t lo = 0;
  while (lo < hi) {
    size_t mid = lo + (hi - lo) / 2;
    const ResourceItem* resource = ResourceAt(mid);
    if (resource->mOffset <= aOffset &&
        aOffset < resource->mOffset + resource->mData.Length()) {
      if (aResourceOffset) {
        *aResourceOffset = aOffset - resource->mOffset;
      }
      return uint32_t(mid);
    }
    if (resource->mOffset + resource->mData.Length() <= aOffset) {
      // Target lies after this item.
      lo = mid + 1;
    } else {
      // Target lies before this item.
      hi = mid;
    }
  }

  // Not found (aOffset is at or past the logical end).
  return uint32_t(GetSize());
}
+
// Removes and returns the head item; ownership transfers to the caller, who
// must delete it (see EvictBefore/EvictAll).
ResourceItem* ResourceQueue::PopFront() {
  return nsDeque<ResourceItem>::PopFront();
}
+
+#undef SBR_DEBUG
+#undef SBR_DEBUGV
+
+} // namespace mozilla
diff --git a/dom/media/mediasource/ResourceQueue.h b/dom/media/mediasource/ResourceQueue.h
new file mode 100644
index 0000000000..efd1e0c20a
--- /dev/null
+++ b/dom/media/mediasource/ResourceQueue.h
@@ -0,0 +1,88 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_RESOURCEQUEUE_H_
+#define MOZILLA_RESOURCEQUEUE_H_
+
+#include "nsDeque.h"
+#include "MediaSpan.h"
+
+namespace mozilla {
+
+class ErrorResult;
+
+// A SourceBufferResource has a queue containing the data that is appended
+// to it. The queue holds instances of ResourceItem which is an array of the
+// bytes. Appending data to the SourceBufferResource pushes this onto the
+// queue.
+
+// Data is evicted once it reaches a size threshold. This pops the items off
+// the front of the queue and deletes it. If an eviction happens then the
+// MediaSource is notified (done in SourceBuffer::AppendData) which then
+// requests all SourceBuffers to evict data up to approximately the same
+// timepoint.
+
// One appended chunk of data plus the logical byte offset at which it starts
// within the whole resource.
struct ResourceItem {
  ResourceItem(const MediaSpan& aData, uint64_t aOffset);
  // Counts only this object; the shared MediaByteBuffer is reported by
  // ResourceQueue::SizeOfExcludingThis.
  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;
  // The chunk's bytes (a view into a possibly-shared MediaByteBuffer).
  MediaSpan mData;
  // Logical offset of the first byte of mData within the resource.
  uint64_t mOffset;
};
+
class ResourceQueue : private nsDeque<ResourceItem> {
 public:
  ResourceQueue();

  // Returns the logical byte offset of the start of the data.
  uint64_t GetOffset();

  // Returns the length of all items in the queue plus the offset.
  // This is the logical length of the resource.
  uint64_t GetLength();

  // Copies aCount bytes from aOffset in the queue into aDest.
  void CopyData(uint64_t aOffset, uint32_t aCount, char* aDest);

  // Appends aData at the logical end of the resource.
  void AppendItem(const MediaSpan& aData);

  // Tries to evict at least aSizeToEvict from the queue up until
  // aOffset. Returns amount evicted.
  uint32_t Evict(uint64_t aOffset, uint32_t aSizeToEvict);

  // Evicts all data before logical offset aOffset; returns bytes evicted.
  uint32_t EvictBefore(uint64_t aOffset);

  // Drops everything in the queue; returns bytes evicted.
  uint32_t EvictAll();

  // Memory reporting (deque + items + de-duplicated underlying buffers).
  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;

#if defined(DEBUG)
  // Writes each item to "<aPath>/<index>.bin" for offline inspection.
  void Dump(const char* aPath);
#endif

  // Returns a direct pointer to aSize bytes at aOffset, or nullptr if the
  // span is absent or crosses an item boundary.
  const uint8_t* GetContiguousAccess(int64_t aOffset, size_t aSize);

 private:
  // Typed accessor for the item at aIndex.
  ResourceItem* ResourceAt(uint32_t aIndex) const;

  // Returns the index of the resource that contains the given
  // logical offset. aResourceOffset will contain the offset into
  // the resource at the given index returned if it is not null. If
  // no such resource exists, returns GetSize() and aOffset is
  // untouched.
  uint32_t GetAtOffset(uint64_t aOffset, uint32_t* aResourceOffset) const;

  // Removes and returns the head item; caller takes ownership.
  ResourceItem* PopFront();

  // Logical length of the resource.
  uint64_t mLogicalLength;

  // Logical offset into the resource of the first element in the queue.
  uint64_t mOffset;
};
+
+} // namespace mozilla
+
+#endif /* MOZILLA_RESOURCEQUEUE_H_ */
diff --git a/dom/media/mediasource/SourceBuffer.cpp b/dom/media/mediasource/SourceBuffer.cpp
new file mode 100644
index 0000000000..391e5253c9
--- /dev/null
+++ b/dom/media/mediasource/SourceBuffer.cpp
@@ -0,0 +1,765 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "SourceBuffer.h"
+
+#include "AsyncEventRunner.h"
+#include "MediaData.h"
+#include "MediaSourceDemuxer.h"
+#include "MediaSourceUtils.h"
+#include "mozilla/ErrorResult.h"
+#include "mozilla/FloatingPoint.h"
+#include "mozilla/Preferences.h"
+#include "mozilla/dom/MediaSourceBinding.h"
+#include "mozilla/dom/TimeRanges.h"
+#include "nsError.h"
+#include "nsIRunnable.h"
+#include "nsThreadUtils.h"
+#include "mozilla/Logging.h"
+#include <time.h>
+#include "TimeUnits.h"
+
+struct JSContext;
+class JSObject;
+
+extern mozilla::LogModule* GetMediaSourceLog();
+extern mozilla::LogModule* GetMediaSourceAPILog();
+
+#define MSE_DEBUG(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceLog(), mozilla::LogLevel::Debug, "(%s)::%s: " arg, \
+ mType.OriginalString().Data(), __func__, ##__VA_ARGS__)
+#define MSE_DEBUGV(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceLog(), mozilla::LogLevel::Verbose, "(%s)::%s: " arg, \
+ mType.OriginalString().Data(), __func__, ##__VA_ARGS__)
+#define MSE_API(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceAPILog(), mozilla::LogLevel::Debug, \
+ "(%s)::%s: " arg, mType.OriginalString().Data(), __func__, \
+ ##__VA_ARGS__)
+
+namespace mozilla {
+
+using media::TimeUnit;
+typedef SourceBufferAttributes::AppendState AppendState;
+
+namespace dom {
+
// WebIDL `mode` attribute setter
// (http://w3c.github.io/media-source/#dom-sourcebuffer-mode).
void SourceBuffer::SetMode(SourceBufferAppendMode aMode, ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("SetMode(aMode=%" PRIu32 ")", static_cast<uint32_t>(aMode));
  // Throw if removed from the parent MediaSource or an append/remove is in
  // flight.
  if (!IsAttached() || mUpdating) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  // Byte streams for which we generate timestamps (see ChangeType: audio/mpeg
  // and audio/aac) must stay in "sequence" mode.
  if (mCurrentAttributes.mGenerateTimestamps &&
      aMode == SourceBufferAppendMode::Segments) {
    aRv.ThrowTypeError(
        "Can't set mode to \"segments\" when the byte stream generates "
        "timestamps");
    return;
  }
  MOZ_ASSERT(mMediaSource->ReadyState() != MediaSourceReadyState::Closed);
  // Setting the mode while "ended" re-opens the parent MediaSource.
  if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
    mMediaSource->SetReadyState(MediaSourceReadyState::Open);
  }
  // Mode changes are forbidden mid-way through parsing a media segment.
  if (mCurrentAttributes.GetAppendState() ==
      AppendState::PARSING_MEDIA_SEGMENT) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  if (aMode == SourceBufferAppendMode::Sequence) {
    // Will set GroupStartTimestamp to GroupEndTimestamp.
    mCurrentAttributes.RestartGroupStartTimestamp();
  }

  mCurrentAttributes.SetAppendMode(aMode);
}
+
// WebIDL `timestampOffset` attribute setter. Follows the spec steps: validate
// state, re-open an "ended" MediaSource, forbid mid-segment changes, then
// record the new offset.
void SourceBuffer::SetTimestampOffset(double aTimestampOffset,
                                      ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("SetTimestampOffset(aTimestampOffset=%f)", aTimestampOffset);
  if (!IsAttached() || mUpdating) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  MOZ_ASSERT(mMediaSource->ReadyState() != MediaSourceReadyState::Closed);
  if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
    mMediaSource->SetReadyState(MediaSourceReadyState::Open);
  }
  if (mCurrentAttributes.GetAppendState() ==
      AppendState::PARSING_MEDIA_SEGMENT) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  mCurrentAttributes.SetApparentTimestampOffset(aTimestampOffset);
  // In sequence mode the group start timestamp tracks the new offset.
  if (mCurrentAttributes.GetAppendMode() == SourceBufferAppendMode::Sequence) {
    mCurrentAttributes.SetGroupStartTimestamp(
        mCurrentAttributes.GetTimestampOffset());
  }
}
+
// Returns the buffered ranges as reported by the track buffers manager.
// NOTE(review): identical to GetTimeIntervals() below; consider consolidating
// callers onto one of the two.
media::TimeIntervals SourceBuffer::GetBufferedIntervals() {
  MOZ_ASSERT(mTrackBuffersManager);
  return mTrackBuffersManager->Buffered();
}
+
// WebIDL `buffered` attribute getter. Caches the last TimeRanges object and
// only replaces it when the underlying ranges actually changed, per spec.
TimeRanges* SourceBuffer::GetBuffered(ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  // http://w3c.github.io/media-source/index.html#widl-SourceBuffer-buffered
  // 1. If this object has been removed from the sourceBuffers attribute of the
  // parent media source then throw an InvalidStateError exception and abort
  // these steps.
  if (!IsAttached()) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return nullptr;
  }
  bool rangeChanged = true;
  media::TimeIntervals intersection = mTrackBuffersManager->Buffered();
  MSE_DEBUGV("intersection=%s", DumpTimeRanges(intersection).get());
  if (mBuffered) {
    media::TimeIntervals currentValue(mBuffered->ToTimeIntervals());
    rangeChanged = (intersection != currentValue);
    MSE_DEBUGV("currentValue=%s", DumpTimeRanges(currentValue).get());
  }
  // 5. If intersection ranges does not contain the exact same range information
  // as the current value of this attribute, then update the current value of
  // this attribute to intersection ranges.
  if (rangeChanged) {
    mBuffered = new TimeRanges(ToSupports(this),
                               intersection.ToMicrosecondResolution());
  }
  // 6. Return the current value of this attribute.
  return mBuffered;
}
+
// Returns the buffered ranges as reported by the track buffers manager.
// NOTE(review): identical to GetBufferedIntervals() above; consider
// consolidating callers onto one of the two.
media::TimeIntervals SourceBuffer::GetTimeIntervals() {
  MOZ_ASSERT(mTrackBuffersManager);
  return mTrackBuffersManager->Buffered();
}
+
// WebIDL `appendWindowStart` attribute setter. The new value must be
// non-negative and strictly below the current appendWindowEnd.
void SourceBuffer::SetAppendWindowStart(double aAppendWindowStart,
                                        ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("SetAppendWindowStart(aAppendWindowStart=%f)", aAppendWindowStart);
  DDLOG(DDLogCategory::API, "SetAppendWindowStart", aAppendWindowStart);
  if (!IsAttached() || mUpdating) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  if (aAppendWindowStart < 0 ||
      aAppendWindowStart >= mCurrentAttributes.GetAppendWindowEnd()) {
    aRv.ThrowTypeError("Invalid appendWindowStart value");
    return;
  }
  mCurrentAttributes.SetAppendWindowStart(aAppendWindowStart);
}
+
// WebIDL `appendWindowEnd` attribute setter. The new value must be a number
// strictly above the current appendWindowStart (NaN is rejected).
void SourceBuffer::SetAppendWindowEnd(double aAppendWindowEnd,
                                      ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("SetAppendWindowEnd(aAppendWindowEnd=%f)", aAppendWindowEnd);
  DDLOG(DDLogCategory::API, "SetAppendWindowEnd", aAppendWindowEnd);
  if (!IsAttached() || mUpdating) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  if (std::isnan(aAppendWindowEnd) ||
      aAppendWindowEnd <= mCurrentAttributes.GetAppendWindowStart()) {
    aRv.ThrowTypeError("Invalid appendWindowEnd value");
    return;
  }
  mCurrentAttributes.SetAppendWindowEnd(aAppendWindowEnd);
}
+
// WebIDL appendBuffer(ArrayBuffer): pins the JS buffer's view with
// ComputeState() and forwards the raw bytes to AppendData().
void SourceBuffer::AppendBuffer(const ArrayBuffer& aData, ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("AppendBuffer(ArrayBuffer)");
  aData.ComputeState();
  DDLOG(DDLogCategory::API, "AppendBuffer", aData.Length());
  AppendData(aData.Data(), aData.Length(), aRv);
}
+
// WebIDL appendBuffer(ArrayBufferView): same as the ArrayBuffer overload but
// for typed-array views.
void SourceBuffer::AppendBuffer(const ArrayBufferView& aData,
                                ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("AppendBuffer(ArrayBufferView)");
  aData.ComputeState();
  DDLOG(DDLogCategory::API, "AppendBuffer", aData.Length());
  AppendData(aData.Data(), aData.Length(), aRv);
}
+
// Promise-returning variant of appendBuffer(ArrayBuffer); resolves when the
// append completes (see AppendDataAsync).
already_AddRefed<Promise> SourceBuffer::AppendBufferAsync(
    const ArrayBuffer& aData, ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());

  MSE_API("AppendBufferAsync(ArrayBuffer)");
  aData.ComputeState();
  DDLOG(DDLogCategory::API, "AppendBufferAsync", aData.Length());

  return AppendDataAsync(aData.Data(), aData.Length(), aRv);
}
+
// Promise-returning variant of appendBuffer(ArrayBufferView).
already_AddRefed<Promise> SourceBuffer::AppendBufferAsync(
    const ArrayBufferView& aData, ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());

  MSE_API("AppendBufferAsync(ArrayBufferView)");
  aData.ComputeState();
  DDLOG(DDLogCategory::API, "AppendBufferAsync", aData.Length());

  return AppendDataAsync(aData.Data(), aData.Length(), aRv);
}
+
+void SourceBuffer::Abort(ErrorResult& aRv) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_API("Abort()");
+ if (!IsAttached()) {
+ DDLOG(DDLogCategory::API, "Abort", NS_ERROR_DOM_INVALID_STATE_ERR);
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+ if (mMediaSource->ReadyState() != MediaSourceReadyState::Open) {
+ DDLOG(DDLogCategory::API, "Abort", NS_ERROR_DOM_INVALID_STATE_ERR);
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+ if (mPendingRemoval.Exists()) {
+ DDLOG(DDLogCategory::API, "Abort", NS_ERROR_DOM_INVALID_STATE_ERR);
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+ DDLOG(DDLogCategory::API, "Abort", NS_OK);
+ AbortBufferAppend();
+ ResetParserState();
+ mCurrentAttributes.SetAppendWindowStart(0);
+ mCurrentAttributes.SetAppendWindowEnd(PositiveInfinity<double>());
+}
+
// Cancels an in-flight appendBuffer operation: disconnects the completion
// promise, tells the TrackBuffersManager to abandon the append, and fires the
// abort/updateend events via AbortUpdating(). No-op if nothing is updating.
void SourceBuffer::AbortBufferAppend() {
  if (mUpdating) {
    mCompletionPromise.DisconnectIfExists();
    if (mPendingAppend.Exists()) {
      mPendingAppend.Disconnect();
      mTrackBuffersManager->AbortAppendData();
    }
    AbortUpdating();
  }
}
+
// Runs the spec's "reset parser state" algorithm via the manager, using the
// current attributes to restore the append state.
void SourceBuffer::ResetParserState() {
  mTrackBuffersManager->ResetParserState(mCurrentAttributes);
}
+
// WebIDL remove(start, end): validates the range via PrepareRemove, then
// kicks off the asynchronous range-removal algorithm.
void SourceBuffer::Remove(double aStart, double aEnd, ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("Remove(aStart=%f, aEnd=%f)", aStart, aEnd);
  DDLOG(DDLogCategory::API, "Remove-from", aStart);
  DDLOG(DDLogCategory::API, "Remove-until", aEnd);

  PrepareRemove(aStart, aEnd, aRv);
  if (aRv.Failed()) {
    return;
  }
  RangeRemoval(aStart, aEnd);
}
+
// Promise-returning variant of remove(): the returned promise is resolved by
// StopUpdating() (or rejected by AbortUpdating/AppendError) once the range
// removal finishes.
already_AddRefed<Promise> SourceBuffer::RemoveAsync(double aStart, double aEnd,
                                                    ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("RemoveAsync(aStart=%f, aEnd=%f)", aStart, aEnd);
  DDLOG(DDLogCategory::API, "Remove-from", aStart);
  DDLOG(DDLogCategory::API, "Remove-until", aEnd);

  if (!IsAttached()) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return nullptr;
  }

  // The promise must be created against the MediaSource's global.
  nsCOMPtr<nsIGlobalObject> parentObject =
      do_QueryInterface(mMediaSource->GetParentObject());
  if (!parentObject) {
    aRv.Throw(NS_ERROR_UNEXPECTED);
    return nullptr;
  }

  RefPtr<Promise> promise = Promise::Create(parentObject, aRv);
  if (aRv.Failed()) {
    return nullptr;
  }

  PrepareRemove(aStart, aEnd, aRv);

  if (aRv.Failed()) {
    // The bindings will automatically return a rejected promise.
    return nullptr;
  }
  MOZ_ASSERT(!mDOMPromise, "Can't have a pending operation going");
  // Stash the promise; RangeRemoval's completion path settles it.
  mDOMPromise = promise;
  RangeRemoval(aStart, aEnd);

  return promise.forget();
}
+
+void SourceBuffer::PrepareRemove(double aStart, double aEnd, ErrorResult& aRv) {
+ if (!IsAttached()) {
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+ if (mUpdating) {
+ aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+ return;
+ }
+ if (std::isnan(mMediaSource->Duration())) {
+ aRv.ThrowTypeError("Duration is NaN");
+ return;
+ }
+ if (aStart < 0 || aStart > mMediaSource->Duration()) {
+ aRv.ThrowTypeError("Invalid start value");
+ return;
+ }
+ if (aEnd <= aStart || std::isnan(aEnd)) {
+ aRv.ThrowTypeError("Invalid end value");
+ return;
+ }
+ if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
+ mMediaSource->SetReadyState(MediaSourceReadyState::Open);
+ }
+}
+
// Starts the asynchronous range-removal algorithm: flips mUpdating (firing
// "updatestart") and chains the manager's RangeRemoval promise back to the
// main thread, where StopUpdating fires "update"/"updateend".
void SourceBuffer::RangeRemoval(double aStart, double aEnd) {
  StartUpdating();

  // Keep |this| alive across the async hop.
  RefPtr<SourceBuffer> self = this;
  mTrackBuffersManager
      ->RangeRemoval(TimeUnit::FromSeconds(aStart), TimeUnit::FromSeconds(aEnd))
      ->Then(
          mAbstractMainThread, __func__,
          [self](bool) {
            self->mPendingRemoval.Complete();
            self->StopUpdating();
          },
          // The manager's RangeRemoval promise is never expected to reject.
          []() { MOZ_ASSERT(false); })
      ->Track(mPendingRemoval);
}
+
// WebIDL changeType(type): switches the SourceBuffer to a new MIME type
// without detaching, following the spec's numbered steps (annotated inline).
void SourceBuffer::ChangeType(const nsAString& aType, ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());

  // 1. If type is an empty string then throw a TypeError exception and abort
  //    these steps.
  if (aType.IsEmpty()) {
    aRv.ThrowTypeError("Type must not be empty");
    return;
  }

  // 2. If this object has been removed from the sourceBuffers attribute of the
  //    parent media source , then throw an InvalidStateError exception and
  //    abort these steps.
  // 3. If the updating attribute equals true, then throw an InvalidStateError
  //    exception and abort these steps.
  if (!IsAttached() || mUpdating) {
    DDLOG(DDLogCategory::API, "ChangeType", NS_ERROR_DOM_INVALID_STATE_ERR);
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  // 4. If type contains a MIME type that is not supported or contains a MIME
  //    type that is not supported with the types specified (currently or
  //    previously) of SourceBuffer objects in the sourceBuffers attribute of
  //    the parent media source , then throw a NotSupportedError exception and
  //    abort these steps.
  DecoderDoctorDiagnostics diagnostics;
  // IsTypeSupported throws into aRv on unsupported types.
  MediaSource::IsTypeSupported(aType, &diagnostics, aRv);
  bool supported = !aRv.Failed();
  diagnostics.StoreFormatDiagnostics(
      mMediaSource->GetOwner() ? mMediaSource->GetOwner()->GetExtantDoc()
                               : nullptr,
      aType, supported, __func__);
  MSE_API("ChangeType(aType=%s)%s", NS_ConvertUTF16toUTF8(aType).get(),
          supported ? "" : " [not supported]");
  if (!supported) {
    DDLOG(DDLogCategory::API, "ChangeType",
          static_cast<nsresult>(aRv.ErrorCodeAsInt()));
    return;
  }

  // 5. If the readyState attribute of the parent media source is in the "ended"
  //    state then run the following steps:
  //    1. Set the readyState attribute of the parent media source to "open"
  //    2. Queue a task to fire a simple event named sourceopen at the parent
  //       media source .
  MOZ_ASSERT(mMediaSource->ReadyState() != MediaSourceReadyState::Closed);
  if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
    mMediaSource->SetReadyState(MediaSourceReadyState::Open);
  }
  Maybe<MediaContainerType> containerType = MakeMediaContainerType(aType);
  MOZ_ASSERT(containerType);
  mType = *containerType;
  // 6. Run the reset parser state algorithm .
  ResetParserState();

  // 7. Update the generate timestamps flag on this SourceBuffer object to the
  //    value in the "Generate Timestamps Flag" column of the byte stream format
  //    registry [ MSE-REGISTRY ] entry that is associated with type .
  if (mType.Type() == MEDIAMIMETYPE("audio/mpeg") ||
      mType.Type() == MEDIAMIMETYPE("audio/aac")) {
    mCurrentAttributes.mGenerateTimestamps = true;
    // 8. If the generate timestamps flag equals true:
    //    Set the mode attribute on this SourceBuffer object to "sequence" ,
    //    including running the associated steps for that attribute being set.
    ErrorResult dummy;
    SetMode(SourceBufferAppendMode::Sequence, dummy);
  } else {
    mCurrentAttributes.mGenerateTimestamps = false;
    // Otherwise: Keep the previous value of the mode attribute on this
    // SourceBuffer object, without running any associated steps for that
    // attribute being set.
  }

  // 9. Set pending initialization segment for changeType flag to true.
  mTrackBuffersManager->ChangeType(mType);
}
+
// Severs this SourceBuffer from its MediaSource: aborts any in-flight append,
// detaches the track buffers manager from the demuxer, and drops both
// references. Safe to call more than once.
void SourceBuffer::Detach() {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_DEBUG("Detach");
  if (!mMediaSource) {
    MSE_DEBUG("Already detached");
    return;
  }
  AbortBufferAppend();
  if (mTrackBuffersManager) {
    mMediaSource->GetDecoder()->GetDemuxer()->DetachSourceBuffer(
        mTrackBuffersManager);
    mTrackBuffersManager->Detach();
  }
  mTrackBuffersManager = nullptr;
  mMediaSource = nullptr;
}
+
// Called when the parent MediaSource transitions to "ended"; forwards the
// notification to the track buffers manager.
void SourceBuffer::Ended() {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(IsAttached());
  MSE_DEBUG("Ended");
  mTrackBuffersManager->Ended();
}
+
// Constructs a SourceBuffer for aType, creates its TrackBuffersManager, picks
// the initial append mode ("sequence" for timestamp-generating streams, i.e.
// audio/mpeg and audio/aac, else "segments"), and attaches the manager to the
// MediaSource's demuxer.
SourceBuffer::SourceBuffer(MediaSource* aMediaSource,
                           const MediaContainerType& aType)
    : DOMEventTargetHelper(aMediaSource->GetParentObject()),
      mMediaSource(aMediaSource),
      mAbstractMainThread(aMediaSource->AbstractMainThread()),
      mCurrentAttributes(aType.Type() == MEDIAMIMETYPE("audio/mpeg") ||
                         aType.Type() == MEDIAMIMETYPE("audio/aac")),
      mUpdating(false),
      mActive(false),
      mType(aType) {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(aMediaSource);

  mTrackBuffersManager =
      new TrackBuffersManager(aMediaSource->GetDecoder(), aType);
  DDLINKCHILD("track buffers manager", mTrackBuffersManager.get());

  MSE_DEBUG("Create mTrackBuffersManager=%p", mTrackBuffersManager.get());

  // SetMode cannot fail here; the ErrorResult is deliberately ignored.
  ErrorResult dummy;
  if (mCurrentAttributes.mGenerateTimestamps) {
    SetMode(SourceBufferAppendMode::Sequence, dummy);
  } else {
    SetMode(SourceBufferAppendMode::Segments, dummy);
  }
  mMediaSource->GetDecoder()->GetDemuxer()->AttachSourceBuffer(
      mTrackBuffersManager);
}
+
// The cycle collector's Unlink calls Detach() before destruction, so
// mMediaSource must already be null here.
SourceBuffer::~SourceBuffer() {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(!mMediaSource);
  MSE_DEBUG("");
}
+
// WebIDL parent object; null once Detach() has run.
MediaSource* SourceBuffer::GetParentObject() const { return mMediaSource; }
+
// Standard WebIDL binding hook: creates the JS reflector for this object.
JSObject* SourceBuffer::WrapObject(JSContext* aCx,
                                   JS::Handle<JSObject*> aGivenProto) {
  return SourceBuffer_Binding::Wrap(aCx, this, aGivenProto);
}
+
// Synchronously fires a trusted, data-less DOM event named aName at this
// SourceBuffer.
void SourceBuffer::DispatchSimpleEvent(const char* aName) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_API("Dispatch event '%s'", aName);
  DispatchTrustedEvent(NS_ConvertUTF8toUTF16(aName));
}
+
// Queues a runnable that fires the named event on a later main-thread tick,
// as required by the spec's "queue a task to fire an event" steps.
void SourceBuffer::QueueAsyncSimpleEvent(const char* aName) {
  MSE_DEBUG("Queuing event '%s'", aName);
  nsCOMPtr<nsIRunnable> event = new AsyncEventRunner<SourceBuffer>(this, aName);
  mAbstractMainThread->Dispatch(event.forget());
}
+
// Sets the `updating` flag and queues "updatestart", beginning the spec's
// buffer-append or range-removal algorithm.
void SourceBuffer::StartUpdating() {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(!mUpdating);
  mUpdating = true;
  QueueAsyncSimpleEvent("updatestart");
}
+
// Successful completion of an append/remove: clears `updating`, queues
// "update" and "updateend", and resolves any pending *Async() promise.
void SourceBuffer::StopUpdating() {
  MOZ_ASSERT(NS_IsMainThread());
  if (!mUpdating) {
    // The buffer append or range removal algorithm has been interrupted by
    // abort().
    return;
  }
  mUpdating = false;
  QueueAsyncSimpleEvent("update");
  QueueAsyncSimpleEvent("updateend");
  if (mDOMPromise) {
    mDOMPromise->MaybeResolveWithUndefined();
    mDOMPromise = nullptr;
  }
}
+
// Aborted completion of an append/remove: clears `updating`, queues "abort"
// and "updateend", and rejects any pending *Async() promise.
void SourceBuffer::AbortUpdating() {
  MOZ_ASSERT(NS_IsMainThread());
  mUpdating = false;
  QueueAsyncSimpleEvent("abort");
  QueueAsyncSimpleEvent("updateend");
  if (mDOMPromise) {
    mDOMPromise->MaybeReject(NS_ERROR_DOM_MEDIA_ABORT_ERR);
    mDOMPromise = nullptr;
  }
}
+
+void SourceBuffer::CheckEndTime() {
+ MOZ_ASSERT(NS_IsMainThread());
+ // Check if we need to update mMediaSource duration
+ TimeUnit endTime = mCurrentAttributes.GetGroupEndTimestamp();
+ double duration = mMediaSource->Duration();
+ if (!std::isnan(duration) && endTime.ToSeconds() > duration) {
+ mMediaSource->SetDuration(endTime);
+ }
+}
+
// Core of appendBuffer(): validates/evicts via PrepareAppend, copies the
// caller's bytes, flips `updating`, and hands the buffer to the
// TrackBuffersManager, tracking the async result in mPendingAppend.
void SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength,
                              ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());
  MSE_DEBUG("AppendData(aLength=%u)", aLength);

  RefPtr<MediaByteBuffer> data = PrepareAppend(aData, aLength, aRv);
  if (!data) {
    // aRv already carries the failure from PrepareAppend.
    return;
  }
  StartUpdating();

  mTrackBuffersManager->AppendData(data.forget(), mCurrentAttributes)
      ->Then(mAbstractMainThread, __func__, this,
             &SourceBuffer::AppendDataCompletedWithSuccess,
             &SourceBuffer::AppendDataErrored)
      ->Track(mPendingAppend);
}
+
// Promise-returning append: creates the promise first (against the
// MediaSource's global), runs the synchronous AppendData steps, and stashes
// the promise for StopUpdating/AbortUpdating/AppendError to settle.
already_AddRefed<Promise> SourceBuffer::AppendDataAsync(const uint8_t* aData,
                                                        uint32_t aLength,
                                                        ErrorResult& aRv) {
  MOZ_ASSERT(NS_IsMainThread());

  if (!IsAttached()) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return nullptr;
  }

  nsCOMPtr<nsIGlobalObject> parentObject =
      do_QueryInterface(mMediaSource->GetParentObject());
  if (!parentObject) {
    aRv.Throw(NS_ERROR_UNEXPECTED);
    return nullptr;
  }

  RefPtr<Promise> promise = Promise::Create(parentObject, aRv);
  if (aRv.Failed()) {
    return nullptr;
  }

  AppendData(aData, aLength, aRv);

  if (aRv.Failed()) {
    // The bindings turn the failed aRv into a rejected promise.
    return nullptr;
  }

  MOZ_ASSERT(!mDOMPromise, "Can't have a pending operation going");
  mDOMPromise = promise;

  return promise.forget();
}
+
// Resolve handler for TrackBuffersManager::AppendData. aResult.first is true
// when an init segment was processed (activating this buffer); aResult.second
// carries the attributes updated by the append.
void SourceBuffer::AppendDataCompletedWithSuccess(
    const SourceBufferTask::AppendBufferResult& aResult) {
  MOZ_ASSERT(mUpdating);
  mPendingAppend.Complete();
  DDLOG(DDLogCategory::API, "AppendBuffer-completed", NS_OK);

  if (aResult.first) {
    if (!mActive) {
      // First init segment: tell the MediaSource this buffer is now active,
      // and only fire update/updateend once that notification completes.
      mActive = true;
      MSE_DEBUG("Init segment received");
      RefPtr<SourceBuffer> self = this;
      mMediaSource->SourceBufferIsActive(this)
          ->Then(mAbstractMainThread, __func__,
                 [self, this]() {
                   MSE_DEBUG("Complete AppendBuffer operation");
                   mCompletionPromise.Complete();
                   StopUpdating();
                 })
          ->Track(mCompletionPromise);
    }
  }
  if (mActive) {
    // Tell our parent decoder that we have received new data
    // and send progress event.
    mMediaSource->GetDecoder()->NotifyDataArrived();
  }

  // Adopt the attributes (append state, timestamps, ...) produced by the
  // append.
  mCurrentAttributes = aResult.second;

  CheckEndTime();

  // If the activation path above is pending, it will call StopUpdating.
  if (!mCompletionPromise.Exists()) {
    StopUpdating();
  }
}
+
// Reject handler for TrackBuffersManager::AppendData. CANCELED means abort()
// or shutdown already dealt with the bookkeeping; anything else runs the
// spec's append-error algorithm.
void SourceBuffer::AppendDataErrored(const MediaResult& aError) {
  MOZ_ASSERT(mUpdating);
  mPendingAppend.Complete();
  DDLOG(DDLogCategory::API, "AppendBuffer-error", aError);

  switch (aError.Code()) {
    case NS_ERROR_DOM_MEDIA_CANCELED:
      // Nothing further to do as the trackbuffer has been shutdown.
      // or append was aborted and abort() has handled all the events.
      break;
    default:
      AppendError(aError);
      break;
  }
}
+
// Spec "append error" algorithm: reset the parser, clear `updating`, fire
// "error"/"updateend", end the MediaSource stream with the decode error, and
// reject any pending *Async() promise.
void SourceBuffer::AppendError(const MediaResult& aDecodeError) {
  MOZ_ASSERT(NS_IsMainThread());

  ResetParserState();

  mUpdating = false;

  QueueAsyncSimpleEvent("error");
  QueueAsyncSimpleEvent("updateend");

  MOZ_ASSERT(NS_FAILED(aDecodeError));

  mMediaSource->EndOfStream(aDecodeError);

  if (mDOMPromise) {
    mDOMPromise->MaybeReject(aDecodeError);
    mDOMPromise = nullptr;
  }
}
+
// Pre-append steps: validates state, re-opens an "ended" MediaSource, gives
// the manager a chance to evict old data, and copies the caller's bytes into
// a MediaByteBuffer. Returns null (with aRv set) on any failure.
already_AddRefed<MediaByteBuffer> SourceBuffer::PrepareAppend(
    const uint8_t* aData, uint32_t aLength, ErrorResult& aRv) {
  typedef TrackBuffersManager::EvictDataResult Result;

  if (!IsAttached() || mUpdating) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return nullptr;
  }

  // If the HTMLMediaElement.error attribute is not null, then throw an
  // InvalidStateError exception and abort these steps.
  if (!mMediaSource->GetDecoder() ||
      mMediaSource->GetDecoder()->OwnerHasError()) {
    MSE_DEBUG("HTMLMediaElement.error is not null");
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return nullptr;
  }

  if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
    mMediaSource->SetReadyState(MediaSourceReadyState::Open);
  }

  // Eviction uses a byte threshold. If the buffer is greater than the
  // number of bytes then data is evicted.
  // TODO: Drive evictions off memory pressure notifications.
  // TODO: Consider a global eviction threshold rather than per TrackBuffer.
  // Give a chance to the TrackBuffersManager to evict some data if needed.
  Result evicted = mTrackBuffersManager->EvictData(
      TimeUnit::FromSeconds(mMediaSource->GetDecoder()->GetCurrentTime()),
      aLength);

  // See if we have enough free space to append our new data.
  if (evicted == Result::BUFFER_FULL) {
    aRv.Throw(NS_ERROR_DOM_MEDIA_SOURCE_FULL_BUFFER_QUOTA_EXCEEDED_ERR);
    return nullptr;
  }

  RefPtr<MediaByteBuffer> data = new MediaByteBuffer();
  // Fallible copy: an OOM here is reported as quota-exceeded, same as a full
  // buffer.
  if (!data->AppendElements(aData, aLength, fallible)) {
    aRv.Throw(NS_ERROR_DOM_MEDIA_SOURCE_FULL_BUFFER_QUOTA_EXCEEDED_ERR);
    return nullptr;
  }
  return data.forget();
}
+
+TimeUnit SourceBuffer::GetBufferedEnd() {
+ MOZ_ASSERT(NS_IsMainThread());
+ ErrorResult dummy;
+ media::TimeIntervals intervals = GetBufferedIntervals();
+ return intervals.GetEnd();
+}
+
// Highest start time across this buffer's tracks, per the manager.
TimeUnit SourceBuffer::HighestStartTime() {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mTrackBuffersManager);
  return mTrackBuffersManager->HighestStartTime();
}
+
// Highest end time across this buffer's tracks, per the manager.
TimeUnit SourceBuffer::HighestEndTime() {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mTrackBuffersManager);
  return mTrackBuffersManager->HighestEndTime();
}
+
// Cycle-collection boilerplate. Unlink calls Detach() first so the
// TrackBuffersManager is released and mMediaSource nulled before the strong
// references below are dropped.
NS_IMPL_CYCLE_COLLECTION_CLASS(SourceBuffer)

NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(SourceBuffer)
  tmp->Detach();
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mMediaSource)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mBuffered)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mDOMPromise)
NS_IMPL_CYCLE_COLLECTION_UNLINK_END_INHERITED(DOMEventTargetHelper)

NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(SourceBuffer,
                                                  DOMEventTargetHelper)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mMediaSource)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mBuffered)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mDOMPromise)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

NS_IMPL_ADDREF_INHERITED(SourceBuffer, DOMEventTargetHelper)
NS_IMPL_RELEASE_INHERITED(SourceBuffer, DOMEventTargetHelper)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(SourceBuffer)
NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
+
+#undef MSE_DEBUG
+#undef MSE_DEBUGV
+#undef MSE_API
+
+} // namespace dom
+
+} // namespace mozilla
diff --git a/dom/media/mediasource/SourceBuffer.h b/dom/media/mediasource/SourceBuffer.h
new file mode 100644
index 0000000000..6155952acf
--- /dev/null
+++ b/dom/media/mediasource/SourceBuffer.h
@@ -0,0 +1,207 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_dom_SourceBuffer_h_
+#define mozilla_dom_SourceBuffer_h_
+
+#include "mozilla/MozPromise.h"
+#include "MediaContainerType.h"
+#include "MediaSource.h"
+#include "js/RootingAPI.h"
+#include "mozilla/Assertions.h"
+#include "mozilla/Atomics.h"
+#include "mozilla/Attributes.h"
+#include "mozilla/DOMEventTargetHelper.h"
+#include "mozilla/UniquePtr.h"
+#include "mozilla/dom/SourceBufferBinding.h"
+#include "mozilla/dom/TypedArray.h"
+#include "mozilla/mozalloc.h"
+#include "nsCOMPtr.h"
+#include "nsCycleCollectionNoteChild.h"
+#include "nsCycleCollectionParticipant.h"
+#include "nsISupports.h"
+#include "nscore.h"
+#include "TrackBuffersManager.h"
+#include "SourceBufferTask.h"
+
+class JSObject;
+struct JSContext;
+
+namespace mozilla {
+
+class AbstractThread;
+class ErrorResult;
+class MediaByteBuffer;
+template <typename T>
+class AsyncEventRunner;
+
+DDLoggedTypeName(dom::SourceBuffer);
+
+namespace dom {
+
+class TimeRanges;
+
+class SourceBuffer final : public DOMEventTargetHelper,
+ public DecoderDoctorLifeLogger<SourceBuffer> {
+ public:
+ /** WebIDL Methods. */
+ SourceBufferAppendMode Mode() const {
+ return mCurrentAttributes.GetAppendMode();
+ }
+
+ void SetMode(SourceBufferAppendMode aMode, ErrorResult& aRv);
+
+ bool Updating() const { return mUpdating; }
+
+ TimeRanges* GetBuffered(ErrorResult& aRv);
+ media::TimeIntervals GetTimeIntervals();
+
+ double TimestampOffset() const {
+ return mCurrentAttributes.GetApparentTimestampOffset();
+ }
+
+ void SetTimestampOffset(double aTimestampOffset, ErrorResult& aRv);
+
+ double AppendWindowStart() const {
+ return mCurrentAttributes.GetAppendWindowStart();
+ }
+
+ void SetAppendWindowStart(double aAppendWindowStart, ErrorResult& aRv);
+
+ double AppendWindowEnd() const {
+ return mCurrentAttributes.GetAppendWindowEnd();
+ }
+
+ void SetAppendWindowEnd(double aAppendWindowEnd, ErrorResult& aRv);
+
+ void AppendBuffer(const ArrayBuffer& aData, ErrorResult& aRv);
+ void AppendBuffer(const ArrayBufferView& aData, ErrorResult& aRv);
+
+ already_AddRefed<Promise> AppendBufferAsync(const ArrayBuffer& aData,
+ ErrorResult& aRv);
+ already_AddRefed<Promise> AppendBufferAsync(const ArrayBufferView& aData,
+ ErrorResult& aRv);
+
+ void Abort(ErrorResult& aRv);
+ void AbortBufferAppend();
+
+ void Remove(double aStart, double aEnd, ErrorResult& aRv);
+
+ already_AddRefed<Promise> RemoveAsync(double aStart, double aEnd,
+ ErrorResult& aRv);
+
+ void ChangeType(const nsAString& aType, ErrorResult& aRv);
+
+ IMPL_EVENT_HANDLER(updatestart);
+ IMPL_EVENT_HANDLER(update);
+ IMPL_EVENT_HANDLER(updateend);
+ IMPL_EVENT_HANDLER(error);
+ IMPL_EVENT_HANDLER(abort);
+
+ /** End WebIDL Methods. */
+
+ NS_DECL_ISUPPORTS_INHERITED
+ NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(SourceBuffer, DOMEventTargetHelper)
+
+ SourceBuffer(MediaSource* aMediaSource, const MediaContainerType& aType);
+
+ MediaSource* GetParentObject() const;
+
+ JSObject* WrapObject(JSContext* aCx,
+ JS::Handle<JSObject*> aGivenProto) override;
+
+ // Notify the SourceBuffer that it has been detached from the
+ // MediaSource's sourceBuffer list.
+ void Detach();
+ bool IsAttached() const { return mMediaSource != nullptr; }
+
+ void Ended();
+
+ media::TimeIntervals GetBufferedIntervals();
+ media::TimeUnit GetBufferedEnd();
+ media::TimeUnit HighestStartTime();
+ media::TimeUnit HighestEndTime();
+
+ // Runs the range removal algorithm as defined by the MSE spec.
+ void RangeRemoval(double aStart, double aEnd);
+
+ bool IsActive() const { return mActive; }
+
+ private:
+ ~SourceBuffer();
+
+ friend class AsyncEventRunner<SourceBuffer>;
+ friend class BufferAppendRunnable;
+ friend class mozilla::TrackBuffersManager;
+ void DispatchSimpleEvent(const char* aName);
+ void QueueAsyncSimpleEvent(const char* aName);
+
+ // Update mUpdating and fire the appropriate events.
+ void StartUpdating();
+ void StopUpdating();
+ void AbortUpdating();
+ void ResetParserState();
+
+ // If the media segment contains data beyond the current duration,
+ // then run the duration change algorithm with new duration set to the
+ // maximum of the current duration and the group end timestamp.
+ void CheckEndTime();
+
+ // Shared implementation of AppendBuffer overloads.
+ void AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aRv);
+ // Shared implementation of AppendBufferAsync overloads.
+ already_AddRefed<Promise> AppendDataAsync(const uint8_t* aData,
+ uint32_t aLength, ErrorResult& aRv);
+
+ void PrepareRemove(double aStart, double aEnd, ErrorResult& aRv);
+
+ // Implement the "Append Error Algorithm".
+ // Will call endOfStream() with "decode" error if aDecodeError is true.
+ // 3.5.3 Append Error Algorithm
+ // http://w3c.github.io/media-source/#sourcebuffer-append-error
+ void AppendError(const MediaResult& aDecodeError);
+
+ // Implements the "Prepare Append Algorithm". Returns MediaByteBuffer object
+ // on success or nullptr (with aRv set) on error.
+ already_AddRefed<MediaByteBuffer> PrepareAppend(const uint8_t* aData,
+ uint32_t aLength,
+ ErrorResult& aRv);
+
+ void AppendDataCompletedWithSuccess(
+ const SourceBufferTask::AppendBufferResult& aResult);
+ void AppendDataErrored(const MediaResult& aError);
+
+ RefPtr<MediaSource> mMediaSource;
+ const RefPtr<AbstractThread> mAbstractMainThread;
+
+ RefPtr<TrackBuffersManager> mTrackBuffersManager;
+ SourceBufferAttributes mCurrentAttributes;
+
+ bool mUpdating;
+
+ mozilla::Atomic<bool> mActive;
+
+ MozPromiseRequestHolder<SourceBufferTask::AppendPromise> mPendingAppend;
+ MozPromiseRequestHolder<SourceBufferTask::RangeRemovalPromise>
+ mPendingRemoval;
+ MediaContainerType mType;
+
+ RefPtr<TimeRanges> mBuffered;
+
+ MozPromiseRequestHolder<MediaSource::ActiveCompletionPromise>
+ mCompletionPromise;
+
+ // Only used if MSE v2 experimental mode is active.
+ // Contains the current Promise to be resolved following use of
+ // appendBufferAsync and removeAsync. Not set of no operation is pending.
+ RefPtr<Promise> mDOMPromise;
+};
+
+} // namespace dom
+
+} // namespace mozilla
+
+#endif /* mozilla_dom_SourceBuffer_h_ */
diff --git a/dom/media/mediasource/SourceBufferAttributes.h b/dom/media/mediasource/SourceBufferAttributes.h
new file mode 100644
index 0000000000..f15845b8a9
--- /dev/null
+++ b/dom/media/mediasource/SourceBufferAttributes.h
@@ -0,0 +1,116 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_SourceBufferAttributes_h_
+#define mozilla_SourceBufferAttributes_h_
+
+#include "TimeUnits.h"
+#include "mozilla/dom/SourceBufferBinding.h"
+#include "mozilla/Maybe.h"
+
+namespace mozilla {
+
+class SourceBufferAttributes {
+ public:
+ // Current state as per Segment Parser Loop Algorithm
+ // http://w3c.github.io/media-source/index.html#sourcebuffer-segment-parser-loop
+ enum class AppendState {
+ WAITING_FOR_SEGMENT,
+ PARSING_INIT_SEGMENT,
+ PARSING_MEDIA_SEGMENT,
+ };
+
+ explicit SourceBufferAttributes(bool aGenerateTimestamp)
+ : mGenerateTimestamps(aGenerateTimestamp),
+ mAppendWindowStart(0),
+ mAppendWindowEnd(PositiveInfinity<double>()),
+ mAppendMode(dom::SourceBufferAppendMode::Segments),
+ mApparentTimestampOffset(0),
+ mAppendState(AppendState::WAITING_FOR_SEGMENT) {}
+
+ SourceBufferAttributes(const SourceBufferAttributes& aOther) = default;
+
+ double GetAppendWindowStart() const { return mAppendWindowStart; }
+
+ double GetAppendWindowEnd() const { return mAppendWindowEnd; }
+
+ void SetAppendWindowStart(double aWindowStart) {
+ mAppendWindowStart = aWindowStart;
+ }
+
+ void SetAppendWindowEnd(double aWindowEnd) { mAppendWindowEnd = aWindowEnd; }
+
+ double GetApparentTimestampOffset() const { return mApparentTimestampOffset; }
+
+ void SetApparentTimestampOffset(double aTimestampOffset) {
+ mApparentTimestampOffset = aTimestampOffset;
+ mTimestampOffset = media::TimeUnit::FromSeconds(aTimestampOffset);
+ }
+
+ media::TimeUnit GetTimestampOffset() const { return mTimestampOffset; }
+
+ void SetTimestampOffset(const media::TimeUnit& aTimestampOffset) {
+ mTimestampOffset = aTimestampOffset;
+ mApparentTimestampOffset = aTimestampOffset.ToSeconds();
+ }
+
+ dom::SourceBufferAppendMode GetAppendMode() const { return mAppendMode; }
+
+ void SetAppendMode(dom::SourceBufferAppendMode aAppendMode) {
+ mAppendMode = aAppendMode;
+ }
+
+ void SetGroupStartTimestamp(const media::TimeUnit& aGroupStartTimestamp) {
+ mGroupStartTimestamp = Some(aGroupStartTimestamp);
+ }
+
+ media::TimeUnit GetGroupStartTimestamp() const {
+ return mGroupStartTimestamp.ref();
+ }
+
+ bool HaveGroupStartTimestamp() const { return mGroupStartTimestamp.isSome(); }
+
+ void ResetGroupStartTimestamp() { mGroupStartTimestamp.reset(); }
+
+ void RestartGroupStartTimestamp() {
+ mGroupStartTimestamp = Some(mGroupEndTimestamp);
+ }
+
+ media::TimeUnit GetGroupEndTimestamp() const { return mGroupEndTimestamp; }
+
+ void SetGroupEndTimestamp(const media::TimeUnit& aGroupEndTimestamp) {
+ mGroupEndTimestamp = aGroupEndTimestamp;
+ }
+
+ AppendState GetAppendState() const { return mAppendState; }
+
+ void SetAppendState(AppendState aState) { mAppendState = aState; }
+
+ // mGenerateTimestamp isn't mutable once the source buffer has been
+ // constructed
+ bool mGenerateTimestamps;
+
+ SourceBufferAttributes& operator=(const SourceBufferAttributes& aOther) =
+ default;
+
+ private:
+ SourceBufferAttributes() = delete;
+
+ double mAppendWindowStart;
+ double mAppendWindowEnd;
+ dom::SourceBufferAppendMode mAppendMode;
+ double mApparentTimestampOffset;
+ media::TimeUnit mTimestampOffset;
+ Maybe<media::TimeUnit> mGroupStartTimestamp;
+ media::TimeUnit mGroupEndTimestamp;
+ // The current append state as per
+ // https://w3c.github.io/media-source/#sourcebuffer-append-state
+ AppendState mAppendState;
+};
+
+} // end namespace mozilla
+
+#endif /* mozilla_SourceBufferAttributes_h_ */
diff --git a/dom/media/mediasource/SourceBufferList.cpp b/dom/media/mediasource/SourceBufferList.cpp
new file mode 100644
index 0000000000..9a98f83a1c
--- /dev/null
+++ b/dom/media/mediasource/SourceBufferList.cpp
@@ -0,0 +1,187 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "SourceBufferList.h"
+
+#include "AsyncEventRunner.h"
+#include "mozilla/ErrorResult.h"
+#include "mozilla/dom/SourceBufferListBinding.h"
+#include "mozilla/mozalloc.h"
+#include "nsCOMPtr.h"
+#include "nsIRunnable.h"
+#include "nsString.h"
+#include "nsThreadUtils.h"
+#include "mozilla/Logging.h"
+
+extern mozilla::LogModule* GetMediaSourceLog();
+extern mozilla::LogModule* GetMediaSourceAPILog();
+
+#define MSE_API(arg, ...) \
+ MOZ_LOG(GetMediaSourceAPILog(), mozilla::LogLevel::Debug, \
+ ("SourceBufferList(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+#define MSE_DEBUG(arg, ...) \
+ MOZ_LOG(GetMediaSourceLog(), mozilla::LogLevel::Debug, \
+ ("SourceBufferList(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+
+struct JSContext;
+class JSObject;
+
+using TimeUnit = mozilla::media::TimeUnit;
+
+namespace mozilla::dom {
+
+SourceBufferList::~SourceBufferList() = default;
+
+SourceBuffer* SourceBufferList::IndexedGetter(uint32_t aIndex, bool& aFound) {
+ MOZ_ASSERT(NS_IsMainThread());
+ aFound = aIndex < mSourceBuffers.Length();
+
+ if (!aFound) {
+ return nullptr;
+ }
+ return mSourceBuffers[aIndex];
+}
+
+uint32_t SourceBufferList::Length() {
+ MOZ_ASSERT(NS_IsMainThread());
+ return mSourceBuffers.Length();
+}
+
+void SourceBufferList::Append(SourceBuffer* aSourceBuffer) {
+ MOZ_ASSERT(NS_IsMainThread());
+ mSourceBuffers.AppendElement(aSourceBuffer);
+ QueueAsyncSimpleEvent("addsourcebuffer");
+}
+
+void SourceBufferList::AppendSimple(SourceBuffer* aSourceBuffer) {
+ MOZ_ASSERT(NS_IsMainThread());
+ mSourceBuffers.AppendElement(aSourceBuffer);
+}
+
+void SourceBufferList::Remove(SourceBuffer* aSourceBuffer) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_ALWAYS_TRUE(mSourceBuffers.RemoveElement(aSourceBuffer));
+ aSourceBuffer->Detach();
+ QueueAsyncSimpleEvent("removesourcebuffer");
+}
+
+bool SourceBufferList::Contains(SourceBuffer* aSourceBuffer) {
+ MOZ_ASSERT(NS_IsMainThread());
+ return mSourceBuffers.Contains(aSourceBuffer);
+}
+
+void SourceBufferList::Clear() {
+ MOZ_ASSERT(NS_IsMainThread());
+ for (uint32_t i = 0; i < mSourceBuffers.Length(); ++i) {
+ mSourceBuffers[i]->Detach();
+ }
+ mSourceBuffers.Clear();
+ QueueAsyncSimpleEvent("removesourcebuffer");
+}
+
+void SourceBufferList::ClearSimple() {
+ MOZ_ASSERT(NS_IsMainThread());
+ mSourceBuffers.Clear();
+}
+
+bool SourceBufferList::IsEmpty() {
+ MOZ_ASSERT(NS_IsMainThread());
+ return mSourceBuffers.IsEmpty();
+}
+
+bool SourceBufferList::AnyUpdating() {
+ MOZ_ASSERT(NS_IsMainThread());
+ for (uint32_t i = 0; i < mSourceBuffers.Length(); ++i) {
+ if (mSourceBuffers[i]->Updating()) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void SourceBufferList::RangeRemoval(double aStart, double aEnd) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_DEBUG("RangeRemoval(aStart=%f, aEnd=%f)", aStart, aEnd);
+ for (uint32_t i = 0; i < mSourceBuffers.Length(); ++i) {
+ mSourceBuffers[i]->RangeRemoval(aStart, aEnd);
+ }
+}
+
+void SourceBufferList::Ended() {
+ MOZ_ASSERT(NS_IsMainThread());
+ for (uint32_t i = 0; i < mSourceBuffers.Length(); ++i) {
+ mSourceBuffers[i]->Ended();
+ }
+}
+
+TimeUnit SourceBufferList::GetHighestBufferedEndTime() {
+ MOZ_ASSERT(NS_IsMainThread());
+ TimeUnit highestEndTime = TimeUnit::Zero();
+ for (uint32_t i = 0; i < mSourceBuffers.Length(); ++i) {
+ highestEndTime =
+ std::max(highestEndTime, mSourceBuffers[i]->GetBufferedEnd());
+ }
+ return highestEndTime;
+}
+
+void SourceBufferList::DispatchSimpleEvent(const char* aName) {
+ MOZ_ASSERT(NS_IsMainThread());
+ MSE_API("Dispatch event '%s'", aName);
+ DispatchTrustedEvent(NS_ConvertUTF8toUTF16(aName));
+}
+
+void SourceBufferList::QueueAsyncSimpleEvent(const char* aName) {
+ MSE_DEBUG("Queue event '%s'", aName);
+ nsCOMPtr<nsIRunnable> event =
+ new AsyncEventRunner<SourceBufferList>(this, aName);
+ mAbstractMainThread->Dispatch(event.forget());
+}
+
+SourceBufferList::SourceBufferList(MediaSource* aMediaSource)
+ : DOMEventTargetHelper(aMediaSource->GetParentObject()),
+ mMediaSource(aMediaSource),
+ mAbstractMainThread(mMediaSource->AbstractMainThread()) {
+ MOZ_ASSERT(aMediaSource);
+}
+
+MediaSource* SourceBufferList::GetParentObject() const { return mMediaSource; }
+
+TimeUnit SourceBufferList::HighestStartTime() {
+ MOZ_ASSERT(NS_IsMainThread());
+ TimeUnit highestStartTime = TimeUnit::Zero();
+ for (auto& sourceBuffer : mSourceBuffers) {
+ highestStartTime =
+ std::max(sourceBuffer->HighestStartTime(), highestStartTime);
+ }
+ return highestStartTime;
+}
+
+TimeUnit SourceBufferList::HighestEndTime() {
+ MOZ_ASSERT(NS_IsMainThread());
+ TimeUnit highestEndTime = TimeUnit::Zero();
+ for (auto& sourceBuffer : mSourceBuffers) {
+ highestEndTime = std::max(sourceBuffer->HighestEndTime(), highestEndTime);
+ }
+ return highestEndTime;
+}
+
+JSObject* SourceBufferList::WrapObject(JSContext* aCx,
+ JS::Handle<JSObject*> aGivenProto) {
+ return SourceBufferList_Binding::Wrap(aCx, this, aGivenProto);
+}
+
+NS_IMPL_CYCLE_COLLECTION_INHERITED(SourceBufferList, DOMEventTargetHelper,
+ mMediaSource, mSourceBuffers)
+
+NS_IMPL_ADDREF_INHERITED(SourceBufferList, DOMEventTargetHelper)
+NS_IMPL_RELEASE_INHERITED(SourceBufferList, DOMEventTargetHelper)
+
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(SourceBufferList)
+NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
+
+#undef MSE_API
+#undef MSE_DEBUG
+} // namespace mozilla::dom
diff --git a/dom/media/mediasource/SourceBufferList.h b/dom/media/mediasource/SourceBufferList.h
new file mode 100644
index 0000000000..3779bf353a
--- /dev/null
+++ b/dom/media/mediasource/SourceBufferList.h
@@ -0,0 +1,110 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_dom_SourceBufferList_h_
+#define mozilla_dom_SourceBufferList_h_
+
+#include "SourceBuffer.h"
+#include "js/RootingAPI.h"
+#include "mozilla/Assertions.h"
+#include "mozilla/Attributes.h"
+#include "mozilla/DOMEventTargetHelper.h"
+#include "nsCycleCollectionNoteChild.h"
+#include "nsCycleCollectionParticipant.h"
+#include "nsISupports.h"
+#include "nsTArray.h"
+
+struct JSContext;
+class JSObject;
+
+namespace mozilla {
+
+template <typename T>
+class AsyncEventRunner;
+
+namespace dom {
+
+class MediaSource;
+
+class SourceBufferList final : public DOMEventTargetHelper {
+ public:
+ /** WebIDL Methods. */
+ SourceBuffer* IndexedGetter(uint32_t aIndex, bool& aFound);
+
+ uint32_t Length();
+
+ IMPL_EVENT_HANDLER(addsourcebuffer);
+ IMPL_EVENT_HANDLER(removesourcebuffer);
+
+ /** End WebIDL methods. */
+
+ NS_DECL_ISUPPORTS_INHERITED
+ NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(SourceBufferList,
+ DOMEventTargetHelper)
+
+ explicit SourceBufferList(MediaSource* aMediaSource);
+
+ MediaSource* GetParentObject() const;
+
+ JSObject* WrapObject(JSContext* aCx,
+ JS::Handle<JSObject*> aGivenProto) override;
+
+ // Append a SourceBuffer and fire "addsourcebuffer" at the list.
+ void Append(SourceBuffer* aSourceBuffer);
+
+ // Remove a SourceBuffer and fire "removesourcebuffer" at the list.
+ void Remove(SourceBuffer* aSourceBuffer);
+
+ // Returns true if aSourceBuffer is present in the list.
+ bool Contains(SourceBuffer* aSourceBuffer);
+
+ // Remove all SourceBuffers and fire a single "removesourcebuffer" at the
+ // list.
+ void Clear();
+
+ // True if list has zero entries.
+ bool IsEmpty();
+
+ // Returns true if updating is true on any SourceBuffers in the list.
+ bool AnyUpdating();
+
+ // Runs the range removal steps from the MSE specification on each
+ // SourceBuffer.
+ void RangeRemoval(double aStart, double aEnd);
+
+ // Mark all SourceBuffers input buffers as ended.
+ void Ended();
+
+ // Returns the highest end time of any of the Sourcebuffers.
+ media::TimeUnit GetHighestBufferedEndTime();
+
+ // Append a SourceBuffer to the list. No event is fired.
+ void AppendSimple(SourceBuffer* aSourceBuffer);
+
+ // Remove all SourceBuffers from mSourceBuffers.
+ // No event is fired and no action is performed on the sourcebuffers.
+ void ClearSimple();
+
+ media::TimeUnit HighestStartTime();
+ media::TimeUnit HighestEndTime();
+
+ private:
+ ~SourceBufferList();
+
+ friend class AsyncEventRunner<SourceBufferList>;
+ void DispatchSimpleEvent(const char* aName);
+ void QueueAsyncSimpleEvent(const char* aName);
+
+ RefPtr<MediaSource> mMediaSource;
+ nsTArray<RefPtr<SourceBuffer> > mSourceBuffers;
+ const RefPtr<AbstractThread> mAbstractMainThread;
+};
+
+} // namespace dom
+
+} // namespace mozilla
+
+#endif /* mozilla_dom_SourceBufferList_h_ */
diff --git a/dom/media/mediasource/SourceBufferResource.cpp b/dom/media/mediasource/SourceBufferResource.cpp
new file mode 100644
index 0000000000..49447be015
--- /dev/null
+++ b/dom/media/mediasource/SourceBufferResource.cpp
@@ -0,0 +1,144 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "SourceBufferResource.h"
+
+#include "mozilla/Logging.h"
+#include "mozilla/TaskQueue.h"
+#include "MediaData.h"
+
+mozilla::LogModule* GetSourceBufferResourceLog() {
+ static mozilla::LazyLogModule sLogModule("SourceBufferResource");
+ return sLogModule;
+}
+
+#define SBR_DEBUG(arg, ...) \
+ DDMOZ_LOG(GetSourceBufferResourceLog(), mozilla::LogLevel::Debug, \
+ "::%s: " arg, __func__, ##__VA_ARGS__)
+#define SBR_DEBUGV(arg, ...) \
+ DDMOZ_LOG(GetSourceBufferResourceLog(), mozilla::LogLevel::Verbose, \
+ "::%s: " arg, __func__, ##__VA_ARGS__)
+
+namespace mozilla {
+
+RefPtr<GenericPromise> SourceBufferResource::Close() {
+ MOZ_ASSERT(OnThread());
+ SBR_DEBUG("Close");
+ mClosed = true;
+ return GenericPromise::CreateAndResolve(true, __func__);
+}
+
+nsresult SourceBufferResource::ReadAt(int64_t aOffset, char* aBuffer,
+ uint32_t aCount, uint32_t* aBytes) {
+ SBR_DEBUG("ReadAt(aOffset=%" PRId64 ", aBuffer=%p, aCount=%u, aBytes=%p)",
+ aOffset, aBytes, aCount, aBytes);
+ return ReadAtInternal(aOffset, aBuffer, aCount, aBytes);
+}
+
+nsresult SourceBufferResource::ReadAtInternal(int64_t aOffset, char* aBuffer,
+ uint32_t aCount,
+ uint32_t* aBytes) {
+ MOZ_ASSERT(OnThread());
+
+ if (mClosed || aOffset < 0 || uint64_t(aOffset) < mInputBuffer.GetOffset() ||
+ aOffset > GetLength()) {
+ return NS_ERROR_FAILURE;
+ }
+
+ uint32_t available = GetLength() - aOffset;
+ uint32_t count = std::min(aCount, available);
+
+ SBR_DEBUGV("offset=%" PRId64 " GetLength()=%" PRId64
+ " available=%u count=%u mEnded=%d",
+ aOffset, GetLength(), available, count, mEnded);
+ if (available == 0) {
+ SBR_DEBUGV("reached EOF");
+ *aBytes = 0;
+ return NS_OK;
+ }
+
+ mInputBuffer.CopyData(aOffset, count, aBuffer);
+ *aBytes = count;
+
+ return NS_OK;
+}
+
+nsresult SourceBufferResource::ReadFromCache(char* aBuffer, int64_t aOffset,
+ uint32_t aCount) {
+ SBR_DEBUG("ReadFromCache(aBuffer=%p, aOffset=%" PRId64 ", aCount=%u)",
+ aBuffer, aOffset, aCount);
+ uint32_t bytesRead;
+ nsresult rv = ReadAtInternal(aOffset, aBuffer, aCount, &bytesRead);
+ NS_ENSURE_SUCCESS(rv, rv);
+
+ // ReadFromCache return failure if not all the data is cached.
+ return bytesRead == aCount ? NS_OK : NS_ERROR_FAILURE;
+}
+
+uint32_t SourceBufferResource::EvictData(uint64_t aPlaybackOffset,
+ int64_t aThreshold) {
+ MOZ_ASSERT(OnThread());
+ SBR_DEBUG("EvictData(aPlaybackOffset=%" PRIu64
+ ","
+ "aThreshold=%" PRId64 ")",
+ aPlaybackOffset, aThreshold);
+ uint32_t result = mInputBuffer.Evict(aPlaybackOffset, aThreshold);
+ return result;
+}
+
+void SourceBufferResource::EvictBefore(uint64_t aOffset) {
+ MOZ_ASSERT(OnThread());
+ SBR_DEBUG("EvictBefore(aOffset=%" PRIu64 ")", aOffset);
+
+ mInputBuffer.EvictBefore(aOffset);
+}
+
+uint32_t SourceBufferResource::EvictAll() {
+ MOZ_ASSERT(OnThread());
+ SBR_DEBUG("EvictAll()");
+ return mInputBuffer.EvictAll();
+}
+
+void SourceBufferResource::AppendData(MediaByteBuffer* aData) {
+ AppendData(MediaSpan(aData));
+}
+
+void SourceBufferResource::AppendData(const MediaSpan& aData) {
+ MOZ_ASSERT(OnThread());
+ SBR_DEBUG("AppendData(aData=%p, aLength=%zu)", aData.Elements(),
+ aData.Length());
+ mInputBuffer.AppendItem(aData);
+ mEnded = false;
+}
+
+void SourceBufferResource::Ended() {
+ MOZ_ASSERT(OnThread());
+ SBR_DEBUG("");
+ mEnded = true;
+}
+
+SourceBufferResource::~SourceBufferResource() { SBR_DEBUG(""); }
+
+SourceBufferResource::SourceBufferResource()
+#if defined(DEBUG)
+ : mThread(AbstractThread::GetCurrent())
+#endif
+{
+ SBR_DEBUG("");
+}
+
+#if defined(DEBUG)
+const AbstractThread* SourceBufferResource::GetThread() const {
+ return mThread;
+}
+bool SourceBufferResource::OnThread() const {
+ return !GetThread() || GetThread()->IsCurrentThreadIn();
+}
+#endif
+
+#undef SBR_DEBUG
+#undef SBR_DEBUGV
+} // namespace mozilla
diff --git a/dom/media/mediasource/SourceBufferResource.h b/dom/media/mediasource/SourceBufferResource.h
new file mode 100644
index 0000000000..b117edb558
--- /dev/null
+++ b/dom/media/mediasource/SourceBufferResource.h
@@ -0,0 +1,143 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_SOURCEBUFFERRESOURCE_H_
+#define MOZILLA_SOURCEBUFFERRESOURCE_H_
+
+#include "mozilla/AbstractThread.h"
+#include "mozilla/Logging.h"
+#include "MediaResource.h"
+#include "ResourceQueue.h"
+
+#define UNIMPLEMENTED() \
+ { /* Logging this is too spammy to do by default */ \
+ }
+
+namespace mozilla {
+
+class MediaByteBuffer;
+class AbstractThread;
+
+namespace dom {
+
+class SourceBuffer;
+
+} // namespace dom
+
+DDLoggedTypeDeclNameAndBase(SourceBufferResource, MediaResource);
+
+// SourceBufferResource is not thread safe.
+class SourceBufferResource final
+ : public MediaResource,
+ public DecoderDoctorLifeLogger<SourceBufferResource> {
+ public:
+ SourceBufferResource();
+ RefPtr<GenericPromise> Close() override;
+ nsresult ReadAt(int64_t aOffset, char* aBuffer, uint32_t aCount,
+ uint32_t* aBytes) override;
+ // Memory-based and no locks, caching discouraged.
+ bool ShouldCacheReads() override { return false; }
+ void Pin() override { UNIMPLEMENTED(); }
+ void Unpin() override { UNIMPLEMENTED(); }
+ int64_t GetLength() override { return mInputBuffer.GetLength(); }
+ int64_t GetNextCachedData(int64_t aOffset) override {
+ MOZ_ASSERT(OnThread());
+ MOZ_ASSERT(aOffset >= 0);
+ if (uint64_t(aOffset) < mInputBuffer.GetOffset()) {
+ return mInputBuffer.GetOffset();
+ } else if (aOffset == GetLength()) {
+ return -1;
+ }
+ return aOffset;
+ }
+ int64_t GetCachedDataEnd(int64_t aOffset) override {
+ MOZ_ASSERT(OnThread());
+ MOZ_ASSERT(aOffset >= 0);
+ if (uint64_t(aOffset) < mInputBuffer.GetOffset() ||
+ aOffset >= GetLength()) {
+ // aOffset is outside of the buffered range.
+ return aOffset;
+ }
+ return GetLength();
+ }
+ bool IsDataCachedToEndOfResource(int64_t aOffset) override { return false; }
+ nsresult ReadFromCache(char* aBuffer, int64_t aOffset,
+ uint32_t aCount) override;
+
+ nsresult GetCachedRanges(MediaByteRangeSet& aRanges) override {
+ MOZ_ASSERT(OnThread());
+ if (mInputBuffer.GetLength()) {
+ aRanges +=
+ MediaByteRange(mInputBuffer.GetOffset(), mInputBuffer.GetLength());
+ }
+ return NS_OK;
+ }
+
+ size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
+ MOZ_ASSERT(OnThread());
+ return mInputBuffer.SizeOfExcludingThis(aMallocSizeOf);
+ }
+
+ size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
+ return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
+ }
+
+ // Used by SourceBuffer.
+ void AppendData(MediaByteBuffer* aData);
+ void AppendData(const MediaSpan& aData);
+ void Ended();
+ bool IsEnded() {
+ MOZ_ASSERT(OnThread());
+ return mEnded;
+ }
+ // Remove data from resource if it holds more than the threshold reduced by
+ // the given number of bytes. Returns amount evicted.
+ uint32_t EvictData(uint64_t aPlaybackOffset, int64_t aThresholdReduct);
+
+ // Remove data from resource before the given offset.
+ void EvictBefore(uint64_t aOffset);
+
+ // Remove all data from the resource
+ uint32_t EvictAll();
+
+ // Returns the amount of data currently retained by this resource.
+ int64_t GetSize() {
+ MOZ_ASSERT(OnThread());
+ return mInputBuffer.GetLength() - mInputBuffer.GetOffset();
+ }
+
+ const uint8_t* GetContiguousAccess(int64_t aOffset, size_t aSize) {
+ return mInputBuffer.GetContiguousAccess(aOffset, aSize);
+ }
+
+#if defined(DEBUG)
+ void Dump(const char* aPath) { mInputBuffer.Dump(aPath); }
+#endif
+
+ private:
+ virtual ~SourceBufferResource();
+ nsresult ReadAtInternal(int64_t aOffset, char* aBuffer, uint32_t aCount,
+ uint32_t* aBytes);
+
+#if defined(DEBUG)
+ const RefPtr<AbstractThread> mThread;
+ // TaskQueue methods and objects.
+ const AbstractThread* GetThread() const;
+ bool OnThread() const;
+#endif
+
+ // The buffer holding resource data.
+ ResourceQueue mInputBuffer;
+
+ bool mClosed = false;
+ bool mEnded = false;
+};
+
+} // namespace mozilla
+
+#undef UNIMPLEMENTED
+
+#endif /* MOZILLA_SOURCEBUFFERRESOURCE_H_ */
diff --git a/dom/media/mediasource/SourceBufferTask.h b/dom/media/mediasource/SourceBufferTask.h
new file mode 100644
index 0000000000..34ccba1426
--- /dev/null
+++ b/dom/media/mediasource/SourceBufferTask.h
@@ -0,0 +1,126 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_SOURCEBUFFERTASK_H_
+#define MOZILLA_SOURCEBUFFERTASK_H_
+
+#include "mozilla/MozPromise.h"
+#include "SourceBufferAttributes.h"
+#include "TimeUnits.h"
+#include "MediaResult.h"
+
+#include <utility>
+
+namespace mozilla {
+
+class SourceBufferTask {
+ public:
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(SourceBufferTask);
+ enum class Type {
+ AppendBuffer,
+ Abort,
+ Reset,
+ RangeRemoval,
+ EvictData,
+ Detach,
+ ChangeType
+ };
+
+ typedef std::pair<bool, SourceBufferAttributes> AppendBufferResult;
+ typedef MozPromise<AppendBufferResult, MediaResult, /* IsExclusive = */ true>
+ AppendPromise;
+ typedef MozPromise<bool, nsresult, /* IsExclusive = */ true>
+ RangeRemovalPromise;
+
+ virtual Type GetType() const = 0;
+ virtual const char* GetTypeName() const = 0;
+
+ template <typename ReturnType>
+ ReturnType* As() {
+ MOZ_ASSERT(this->GetType() == ReturnType::sType);
+ return static_cast<ReturnType*>(this);
+ }
+
+ protected:
+ virtual ~SourceBufferTask() = default;
+};
+
+class AppendBufferTask : public SourceBufferTask {
+ public:
+ AppendBufferTask(already_AddRefed<MediaByteBuffer> aData,
+ const SourceBufferAttributes& aAttributes)
+ : mBuffer(aData), mAttributes(aAttributes) {}
+
+ static const Type sType = Type::AppendBuffer;
+ Type GetType() const override { return Type::AppendBuffer; }
+ const char* GetTypeName() const override { return "AppendBuffer"; }
+
+ RefPtr<MediaByteBuffer> mBuffer;
+ SourceBufferAttributes mAttributes;
+ MozPromiseHolder<AppendPromise> mPromise;
+};
+
+class AbortTask : public SourceBufferTask {
+ public:
+ static const Type sType = Type::Abort;
+ Type GetType() const override { return Type::Abort; }
+ const char* GetTypeName() const override { return "Abort"; }
+};
+
+class ResetTask : public SourceBufferTask {
+ public:
+ static const Type sType = Type::Reset;
+ Type GetType() const override { return Type::Reset; }
+ const char* GetTypeName() const override { return "Reset"; }
+};
+
+class RangeRemovalTask : public SourceBufferTask {
+ public:
+ explicit RangeRemovalTask(const media::TimeInterval& aRange)
+ : mRange(aRange) {}
+
+ static const Type sType = Type::RangeRemoval;
+ Type GetType() const override { return Type::RangeRemoval; }
+ const char* GetTypeName() const override { return "RangeRemoval"; }
+
+ media::TimeInterval mRange;
+ MozPromiseHolder<RangeRemovalPromise> mPromise;
+};
+
+class EvictDataTask : public SourceBufferTask {
+ public:
+ EvictDataTask(const media::TimeUnit& aPlaybackTime, int64_t aSizetoEvict)
+ : mPlaybackTime(aPlaybackTime), mSizeToEvict(aSizetoEvict) {}
+
+ static const Type sType = Type::EvictData;
+ Type GetType() const override { return Type::EvictData; }
+ const char* GetTypeName() const override { return "EvictData"; }
+
+ media::TimeUnit mPlaybackTime;
+ int64_t mSizeToEvict;
+};
+
+class DetachTask : public SourceBufferTask {
+ public:
+ static const Type sType = Type::Detach;
+ Type GetType() const override { return Type::Detach; }
+ const char* GetTypeName() const override { return "Detach"; }
+};
+
+class ChangeTypeTask : public SourceBufferTask {
+ public:
+ explicit ChangeTypeTask(const MediaContainerType& aType) : mType(aType) {}
+
+ static const Type sType = Type::ChangeType;
+ Type GetType() const override { return Type::ChangeType; }
+ const char* GetTypeName() const override { return "ChangeType"; }
+
+ const MediaContainerType mType;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/mediasource/TrackBuffersManager.cpp b/dom/media/mediasource/TrackBuffersManager.cpp
new file mode 100644
index 0000000000..779e1bd9d1
--- /dev/null
+++ b/dom/media/mediasource/TrackBuffersManager.cpp
@@ -0,0 +1,3092 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "TrackBuffersManager.h"
+#include "ContainerParser.h"
+#include "MediaSourceDemuxer.h"
+#include "MediaSourceUtils.h"
+#include "SourceBuffer.h"
+#include "SourceBufferResource.h"
+#include "SourceBufferTask.h"
+#include "WebMDemuxer.h"
+#include "mozilla/ErrorResult.h"
+#include "mozilla/Preferences.h"
+#include "mozilla/ProfilerLabels.h"
+#include "mozilla/ProfilerMarkers.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "nsMimeTypes.h"
+
+#ifdef MOZ_FMP4
+# include "MP4Demuxer.h"
+#endif
+
+#include <limits>
+
+extern mozilla::LogModule* GetMediaSourceLog();
+
+#define MSE_DEBUG(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceLog(), mozilla::LogLevel::Debug, "::%s: " arg, \
+ __func__, ##__VA_ARGS__)
+#define MSE_DEBUGV(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceLog(), mozilla::LogLevel::Verbose, "::%s: " arg, \
+ __func__, ##__VA_ARGS__)
+
+// Lazily-created log module used by the SAMPLE_DEBUG macro below for
+// per-sample (verbose) logging.
+mozilla::LogModule* GetMediaSourceSamplesLog() {
+  static mozilla::LazyLogModule sLogModule("MediaSourceSamples");
+  return sLogModule;
+}
+#define SAMPLE_DEBUG(arg, ...) \
+ DDMOZ_LOG(GetMediaSourceSamplesLog(), mozilla::LogLevel::Debug, \
+ "::%s: " arg, __func__, ##__VA_ARGS__)
+
+namespace mozilla {
+
+using dom::SourceBufferAppendMode;
+using media::TimeInterval;
+using media::TimeIntervals;
+using media::TimeUnit;
+typedef SourceBufferTask::AppendBufferResult AppendBufferResult;
+typedef SourceBufferAttributes::AppendState AppendState;
+
+// Map an AppendState value to a printable name for MSE_DEBUG logging.
+static const char* AppendStateToStr(AppendState aState) {
+  if (aState == AppendState::WAITING_FOR_SEGMENT) {
+    return "WAITING_FOR_SEGMENT";
+  }
+  if (aState == AppendState::PARSING_INIT_SEGMENT) {
+    return "PARSING_INIT_SEGMENT";
+  }
+  if (aState == AppendState::PARSING_MEDIA_SEGMENT) {
+    return "PARSING_MEDIA_SEGMENT";
+  }
+  // Unknown enum value; should not happen.
+  return "IMPOSSIBLE";
+}
+
+static Atomic<uint32_t> sStreamSourceID(0u);
+
+// Runnable dispatched to the main thread to forward an "encrypted"
+// (key-needed) notification, with its init data, to the decoder's owner.
+class DispatchKeyNeededEvent : public Runnable {
+ public:
+  DispatchKeyNeededEvent(MediaSourceDecoder* aDecoder,
+                         const nsTArray<uint8_t>& aInitData,
+                         const nsString& aInitDataType)
+      : Runnable("DispatchKeyNeededEvent"),
+        mDecoder(aDecoder),
+        mInitData(aInitData.Clone()),
+        mInitDataType(aInitDataType) {}
+  NS_IMETHOD Run() override {
+    // Note: Null check the owner, as the decoder could have been shutdown
+    // since this event was dispatched.
+    MediaDecoderOwner* owner = mDecoder->GetOwner();
+    if (owner) {
+      owner->DispatchEncrypted(mInitData, mInitDataType);
+    }
+    // Drop the decoder reference eagerly rather than waiting for the
+    // runnable itself to be destroyed.
+    mDecoder = nullptr;
+    return NS_OK;
+  }
+
+ private:
+  RefPtr<MediaSourceDecoder> mDecoder;
+  nsTArray<uint8_t> mInitData;
+  nsString mInitDataType;
+};
+
+// Constructed on the main thread by the owning SourceBuffer. Eviction
+// thresholds come from prefs; the task queue is shared with the parent
+// decoder's MediaSourceDemuxer.
+TrackBuffersManager::TrackBuffersManager(MediaSourceDecoder* aParentDecoder,
+                                         const MediaContainerType& aType)
+    : mBufferFull(false),
+      mFirstInitializationSegmentReceived(false),
+      mChangeTypeReceived(false),
+      mNewMediaSegmentStarted(false),
+      mActiveTrack(false),
+      mType(aType),
+      mParser(ContainerParser::CreateForMIMEType(aType)),
+      mProcessedInput(0),
+      mParentDecoder(new nsMainThreadPtrHolder<MediaSourceDecoder>(
+          "TrackBuffersManager::mParentDecoder", aParentDecoder,
+          false /* strict */)),
+      mAbstractMainThread(aParentDecoder->AbstractMainThread()),
+      mEnded(false),
+      mVideoEvictionThreshold(Preferences::GetUint(
+          "media.mediasource.eviction_threshold.video", 100 * 1024 * 1024)),
+      mAudioEvictionThreshold(Preferences::GetUint(
+          "media.mediasource.eviction_threshold.audio", 20 * 1024 * 1024)),
+      mEvictionState(EvictionState::NO_EVICTION_NEEDED),
+      mMutex("TrackBuffersManager"),
+      mTaskQueue(aParentDecoder->GetDemuxer()->GetTaskQueue()),
+      mTaskQueueCapability(Some(EventTargetCapability{mTaskQueue.get()})) {
+  // Fix: assertion message typo "instanciated" -> "instantiated".
+  MOZ_ASSERT(NS_IsMainThread(), "Must be instantiated on the main thread");
+  DDLINKCHILD("parser", mParser.get());
+}
+
+// Ensure the demuxers are shut down when the manager is destroyed.
+TrackBuffersManager::~TrackBuffersManager() { ShutdownDemuxers(); }
+
+// Main-thread entry point for SourceBuffer.appendBuffer(). Hands the data to
+// the task queue via DoAppendData() and returns a promise resolved once the
+// append has been processed.
+RefPtr<TrackBuffersManager::AppendPromise> TrackBuffersManager::AppendData(
+    already_AddRefed<MediaByteBuffer> aData,
+    const SourceBufferAttributes& aAttributes) {
+  MOZ_ASSERT(NS_IsMainThread());
+  RefPtr<MediaByteBuffer> data(aData);
+  MSE_DEBUG("Appending %zu bytes", data->Length());
+
+  // Appending new data invalidates a previous endOfStream().
+  mEnded = false;
+
+  return InvokeAsync(static_cast<AbstractThread*>(GetTaskQueueSafe().get()),
+                     this, __func__, &TrackBuffersManager::DoAppendData,
+                     data.forget(), aAttributes);
+}
+
+// Runs on the task queue: wraps the appended bytes and the SourceBuffer
+// attributes in an AppendBufferTask, queues it, and returns its promise.
+RefPtr<TrackBuffersManager::AppendPromise> TrackBuffersManager::DoAppendData(
+    already_AddRefed<MediaByteBuffer> aData,
+    const SourceBufferAttributes& aAttributes) {
+  RefPtr<AppendBufferTask> task =
+      new AppendBufferTask(std::move(aData), aAttributes);
+  RefPtr<AppendPromise> p = task->mPromise.Ensure(__func__);
+  QueueTask(task);
+
+  return p;
+}
+
+// Append aTask to the internal queue and kick ProcessTasks(). May be called
+// from any thread; re-dispatches itself onto the task queue when needed.
+void TrackBuffersManager::QueueTask(SourceBufferTask* aTask) {
+  // The source buffer is a wrapped native, it would be unlinked twice and so
+  // the TrackBuffersManager::Detach() would also be called twice. Since the
+  // detach task has been done before, we could ignore this task.
+  RefPtr<TaskQueue> taskQueue = GetTaskQueueSafe();
+  if (!taskQueue) {
+    MOZ_ASSERT(aTask->GetType() == SourceBufferTask::Type::Detach,
+               "only detach task could happen here!");
+    MSE_DEBUG("Could not queue the task '%s' without task queue",
+              aTask->GetTypeName());
+    return;
+  }
+
+  // Hop onto the task queue if we're not already on it.
+  if (!taskQueue->IsCurrentThreadIn()) {
+    nsresult rv =
+        taskQueue->Dispatch(NewRunnableMethod<RefPtr<SourceBufferTask>>(
+            "TrackBuffersManager::QueueTask", this,
+            &TrackBuffersManager::QueueTask, aTask));
+    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+    Unused << rv;
+    return;
+  }
+  mQueue.Push(aTask);
+  ProcessTasks();
+}
+
+// Drain the pending task queue. Runs on the task queue; at most one task is
+// in flight at a time (mCurrentTask). After handling a task, another run of
+// ProcessTasks() is dispatched so tasks are handled one per event-loop turn.
+void TrackBuffersManager::ProcessTasks() {
+  // ProcessTask is always called OnTaskQueue, however it is possible that it is
+  // called once again after a first Detach task has run, in which case
+  // mTaskQueue would be null.
+  // This can happen under two conditions:
+  // 1- Two Detach tasks were queued in a row due to a double cycle collection.
+  // 2- A call to ProcessTasks() had queued another run of ProcessTasks while
+  //    a Detach task is pending.
+  // We handle these two cases by aborting early.
+  // A second Detach task was queued, prior the first one running, ignore it.
+  if (!mTaskQueue) {
+    RefPtr<SourceBufferTask> task = mQueue.Pop();
+    if (!task) {
+      return;
+    }
+    MOZ_RELEASE_ASSERT(task->GetType() == SourceBufferTask::Type::Detach,
+                       "only detach task could happen here!");
+    MSE_DEBUG("Could not process the task '%s' after detached",
+              task->GetTypeName());
+    return;
+  }
+
+  mTaskQueueCapability->AssertOnCurrentThread();
+  typedef SourceBufferTask::Type Type;
+
+  if (mCurrentTask) {
+    // Already have a task pending. ProcessTask will be scheduled once the
+    // current task complete.
+    return;
+  }
+  RefPtr<SourceBufferTask> task = mQueue.Pop();
+  if (!task) {
+    // nothing to do.
+    return;
+  }
+
+  MSE_DEBUG("Process task '%s'", task->GetTypeName());
+  switch (task->GetType()) {
+    case Type::AppendBuffer:
+      mCurrentTask = task;
+      if (!mInputBuffer || mInputBuffer->IsEmpty()) {
+        // Note: we reset mInputBuffer here to ensure it doesn't grow unbounded.
+        mInputBuffer.reset();
+        mInputBuffer = Some(MediaSpan(task->As<AppendBufferTask>()->mBuffer));
+      } else {
+        // mInputBuffer wasn't empty, so we can't just reset it, but we move
+        // the data into a new buffer to clear out data no longer in the span.
+        MSE_DEBUG(
+            "mInputBuffer not empty during append -- data will be copied to "
+            "new buffer. mInputBuffer->Length()=%zu "
+            "mInputBuffer->Buffer()->Length()=%zu",
+            mInputBuffer->Length(), mInputBuffer->Buffer()->Length());
+        const RefPtr<MediaByteBuffer> newBuffer{new MediaByteBuffer()};
+        // Set capacity outside of ctor to let us explicitly handle OOM.
+        const size_t newCapacity =
+            mInputBuffer->Length() +
+            task->As<AppendBufferTask>()->mBuffer->Length();
+        if (!newBuffer->SetCapacity(newCapacity, fallible)) {
+          RejectAppend(NS_ERROR_OUT_OF_MEMORY, __func__);
+          return;
+        }
+        // Use infallible appends as we've already set capacity above.
+        newBuffer->AppendElements(mInputBuffer->Elements(),
+                                  mInputBuffer->Length());
+        newBuffer->AppendElements(*task->As<AppendBufferTask>()->mBuffer);
+        mInputBuffer = Some(MediaSpan(newBuffer));
+      }
+      // Snapshot the SourceBuffer attributes and append window for this
+      // append; the segment parser loop reads them from here.
+      mSourceBufferAttributes = MakeUnique<SourceBufferAttributes>(
+          task->As<AppendBufferTask>()->mAttributes);
+      mAppendWindow = TimeInterval(
+          TimeUnit::FromSeconds(
+              mSourceBufferAttributes->GetAppendWindowStart()),
+          TimeUnit::FromSeconds(mSourceBufferAttributes->GetAppendWindowEnd()));
+      ScheduleSegmentParserLoop();
+      break;
+    case Type::RangeRemoval: {
+      bool rv = CodedFrameRemoval(task->As<RangeRemovalTask>()->mRange);
+      task->As<RangeRemovalTask>()->mPromise.Resolve(rv, __func__);
+      break;
+    }
+    case Type::EvictData:
+      DoEvictData(task->As<EvictDataTask>()->mPlaybackTime,
+                  task->As<EvictDataTask>()->mSizeToEvict);
+      break;
+    case Type::Abort:
+      // not handled yet, and probably never.
+      break;
+    case Type::Reset:
+      CompleteResetParserState();
+      break;
+    case Type::Detach:
+      mCurrentInputBuffer = nullptr;
+      MOZ_DIAGNOSTIC_ASSERT(mQueue.Length() == 0,
+                            "Detach task must be the last");
+      mVideoTracks.Reset();
+      mAudioTracks.Reset();
+      ShutdownDemuxers();
+      ResetTaskQueue();
+      // Note: early return -- no further ProcessTasks run is scheduled.
+      return;
+    case Type::ChangeType:
+      MOZ_RELEASE_ASSERT(!mCurrentTask);
+      MSE_DEBUG("Processing type change from %s -> %s",
+                mType.OriginalString().get(),
+                task->As<ChangeTypeTask>()->mType.OriginalString().get());
+      mType = task->As<ChangeTypeTask>()->mType;
+      mChangeTypeReceived = true;
+      mInitData = nullptr;
+      // A new input buffer will be created once we receive a new init segment.
+      // The first segment received after a changeType call must be an init
+      // segment.
+      mCurrentInputBuffer = nullptr;
+      CompleteResetParserState();
+      break;
+    default:
+      NS_WARNING("Invalid Task");
+  }
+  // Schedule the next iteration so queued tasks are drained one at a time.
+  TaskQueueFromTaskQueue()->Dispatch(
+      NewRunnableMethod("TrackBuffersManager::ProcessTasks", this,
+                        &TrackBuffersManager::ProcessTasks));
+}
+
+// The MSE spec requires that we abort the current SegmentParserLoop
+// which is then followed by a call to ResetParserState.
+// However due to our asynchronous design this causes inherent difficulties.
+// As the spec behaviour is non deterministic anyway, we instead process all
+// pending frames found in the input buffer.
+void TrackBuffersManager::AbortAppendData() {
+  MOZ_ASSERT(NS_IsMainThread());
+  MSE_DEBUG("");
+
+  // Note: ProcessTasks() currently treats Type::Abort as a no-op.
+  QueueTask(new AbortTask());
+}
+
+// Synchronous (main-thread) portion of the MSE Reset Parser State algorithm;
+// the asynchronous remainder runs when the queued ResetTask is processed
+// (CompleteResetParserState()).
+void TrackBuffersManager::ResetParserState(
+    SourceBufferAttributes& aAttributes) {
+  MOZ_ASSERT(NS_IsMainThread());
+  MSE_DEBUG("");
+
+  // Spec states:
+  // 1. If the append state equals PARSING_MEDIA_SEGMENT and the input buffer
+  // contains some complete coded frames, then run the coded frame processing
+  // algorithm until all of these complete coded frames have been processed.
+  // However, we will wait until all coded frames have been processed regardless
+  // of the value of append state.
+  QueueTask(new ResetTask());
+
+  // ResetParserState has some synchronous steps that must be performed now.
+  // The remaining steps will be performed once the ResetTask gets executed.
+
+  // 6. If the mode attribute equals "sequence", then set the group start
+  // timestamp to the group end timestamp
+  if (aAttributes.GetAppendMode() == SourceBufferAppendMode::Sequence) {
+    aAttributes.SetGroupStartTimestamp(aAttributes.GetGroupEndTimestamp());
+  }
+  // 8. Set append state to WAITING_FOR_SEGMENT.
+  aAttributes.SetAppendState(AppendState::WAITING_FOR_SEGMENT);
+}
+
+// Main-thread entry for SourceBuffer.remove(): forwards the interval to
+// CodedFrameRemovalWithPromise() on the task queue.
+RefPtr<TrackBuffersManager::RangeRemovalPromise>
+TrackBuffersManager::RangeRemoval(TimeUnit aStart, TimeUnit aEnd) {
+  MOZ_ASSERT(NS_IsMainThread());
+  MSE_DEBUG("From %.2f to %.2f", aStart.ToSeconds(), aEnd.ToSeconds());
+
+  // Removing data invalidates a previous endOfStream().
+  mEnded = false;
+
+  return InvokeAsync(static_cast<AbstractThread*>(GetTaskQueueSafe().get()),
+                     this, __func__,
+                     &TrackBuffersManager::CodedFrameRemovalWithPromise,
+                     TimeInterval(aStart, aEnd));
+}
+
+// Main-thread check run before an append: decide whether adding aSize bytes
+// requires eviction. When it does, queue an EvictDataTask (the eviction
+// itself runs later, on the task queue, in DoEvictData()) and report to the
+// caller whether the buffer should be considered full.
+TrackBuffersManager::EvictDataResult TrackBuffersManager::EvictData(
+    const TimeUnit& aPlaybackTime, int64_t aSize) {
+  MOZ_ASSERT(NS_IsMainThread());
+
+  if (aSize > EvictionThreshold()) {
+    // We're adding more data than we can hold.
+    return EvictDataResult::BUFFER_FULL;
+  }
+  // Bytes by which we would exceed the threshold once aSize is appended.
+  const int64_t toEvict = GetSize() + aSize - EvictionThreshold();
+
+  const uint32_t canEvict =
+      Evictable(HasVideo() ? TrackInfo::kVideoTrack : TrackInfo::kAudioTrack);
+
+  MSE_DEBUG("currentTime=%" PRId64 " buffered=%" PRId64
+            "kB, eviction threshold=%" PRId64
+            "kB, "
+            "evict=%" PRId64 "kB canevict=%" PRIu32 "kB",
+            aPlaybackTime.ToMicroseconds(), GetSize() / 1024,
+            EvictionThreshold() / 1024, toEvict / 1024, canEvict / 1024);
+
+  if (toEvict <= 0) {
+    // We fit under the threshold; no eviction required.
+    mEvictionState = EvictionState::NO_EVICTION_NEEDED;
+    return EvictDataResult::NO_DATA_EVICTED;
+  }
+
+  EvictDataResult result;
+
+  if (mBufferFull && mEvictionState == EvictionState::EVICTION_COMPLETED &&
+      canEvict < uint32_t(toEvict)) {
+    // Our buffer is currently full. We will make another eviction attempt.
+    // However, the current appendBuffer will fail as we can't know ahead of
+    // time if the eviction will later succeed.
+    result = EvictDataResult::BUFFER_FULL;
+  } else {
+    mEvictionState = EvictionState::EVICTION_NEEDED;
+    result = EvictDataResult::NO_DATA_EVICTED;
+  }
+  MSE_DEBUG("Reached our size limit, schedule eviction of %" PRId64
+            " bytes (%s)",
+            toEvict,
+            result == EvictDataResult::BUFFER_FULL ? "buffer full"
+                                                   : "no data evicted");
+  QueueTask(new EvictDataTask(aPlaybackTime, toEvict));
+
+  return result;
+}
+
+// Main-thread entry for SourceBuffer.changeType(); the actual switch happens
+// when the queued ChangeTypeTask is processed on the task queue.
+void TrackBuffersManager::ChangeType(const MediaContainerType& aType) {
+  MOZ_ASSERT(NS_IsMainThread());
+
+  QueueTask(new ChangeTypeTask(aType));
+}
+
+// Compute the SourceBuffer.buffered attribute: the intersection of each
+// active track's buffered ranges, per the MSE algorithm. Reads the ranges
+// published by UpdateBufferedRanges() under mMutex.
+TimeIntervals TrackBuffersManager::Buffered() const {
+  MSE_DEBUG("");
+
+  // http://w3c.github.io/media-source/index.html#widl-SourceBuffer-buffered
+
+  MutexAutoLock mut(mMutex);
+  nsTArray<const TimeIntervals*> tracks;
+  if (HasVideo()) {
+    tracks.AppendElement(&mVideoBufferedRanges);
+  }
+  if (HasAudio()) {
+    tracks.AppendElement(&mAudioBufferedRanges);
+  }
+
+  // 2. Let highest end time be the largest track buffer ranges end time across
+  // all the track buffers managed by this SourceBuffer object.
+  TimeUnit highestEndTime = HighestEndTime(tracks);
+
+  // 3. Let intersection ranges equal a TimeRange object containing a single
+  // range from 0 to highest end time.
+  TimeIntervals intersection{
+      TimeInterval(TimeUnit::FromSeconds(0), highestEndTime)};
+
+  // 4. For each track buffer managed by this SourceBuffer, run the following
+  // steps:
+  //   1. Let track ranges equal the track buffer ranges for the current track
+  //      buffer.
+  for (const TimeIntervals* trackRanges : tracks) {
+    // 2. If readyState is "ended", then set the end time on the last range in
+    //    track ranges to highest end time.
+    // 3. Let new intersection ranges equal the intersection between the
+    //    intersection ranges and the track ranges.
+    if (mEnded) {
+      TimeIntervals tR = *trackRanges;
+      tR.Add(TimeInterval(tR.GetEnd(), highestEndTime));
+      intersection.Intersection(tR);
+    } else {
+      intersection.Intersection(*trackRanges);
+    }
+  }
+  return intersection;
+}
+
+// Total size in bytes of all buffered media data (audio + video).
+int64_t TrackBuffersManager::GetSize() const { return mSizeSourceBuffer; }
+
+// Record that the stream has ended; Buffered() extends track ranges to the
+// highest end time while this flag is set.
+void TrackBuffersManager::Ended() { mEnded = true; }
+
+// Main-thread entry: queue the final DetachTask, which tears down the tracks,
+// demuxers and task queue (see ProcessTasks Type::Detach).
+void TrackBuffersManager::Detach() {
+  MOZ_ASSERT(NS_IsMainThread());
+  MSE_DEBUG("");
+  QueueTask(new DetachTask());
+}
+
+// Task-queue side of ResetParserState(): discard all partially-parsed input,
+// reset per-track append state, and recreate the parser. When an init segment
+// was already received (and no changeType is pending), the input buffer is
+// re-primed with that init segment so the next Segment Parser Loop run can
+// rebuild the demuxer.
+void TrackBuffersManager::CompleteResetParserState() {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  AUTO_PROFILER_LABEL("TrackBuffersManager::CompleteResetParserState",
+                      MEDIA_PLAYBACK);
+  MSE_DEBUG("");
+
+  // We shouldn't change mInputDemuxer while a demuxer init/reset request is
+  // being processed. See bug 1239983.
+  MOZ_DIAGNOSTIC_ASSERT(!mDemuxerInitRequest.Exists(),
+                        "Previous AppendBuffer didn't complete");
+
+  for (auto& track : GetTracksList()) {
+    // 2. Unset the last decode timestamp on all track buffers.
+    // 3. Unset the last frame duration on all track buffers.
+    // 4. Unset the highest end timestamp on all track buffers.
+    // 5. Set the need random access point flag on all track buffers to true.
+    track->ResetAppendState();
+
+    // if we have been aborted, we may have pending frames that we are going
+    // to discard now.
+    track->mQueuedSamples.Clear();
+  }
+
+  // 7. Remove all bytes from the input buffer.
+  mPendingInputBuffer.reset();
+  mInputBuffer.reset();
+  if (mCurrentInputBuffer) {
+    mCurrentInputBuffer->EvictAll();
+    // The demuxer will be recreated during the next run of SegmentParserLoop.
+    // As such we don't need to notify it that data has been removed.
+    mCurrentInputBuffer = new SourceBufferResource();
+  }
+
+  // We could be left with a demuxer in an unusable state. It needs to be
+  // recreated. Unless we have a pending changeType operation, we store in the
+  // InputBuffer an init segment which will be parsed during the next Segment
+  // Parser Loop and a new demuxer will be created and initialized.
+  // If we are in the middle of a changeType operation, then we do not have an
+  // init segment yet. The next appendBuffer operation will need to provide such
+  // init segment.
+  if (mFirstInitializationSegmentReceived && !mChangeTypeReceived) {
+    MOZ_ASSERT(mInitData && mInitData->Length(),
+               "we must have an init segment");
+    // The aim here is really to destroy our current demuxer.
+    CreateDemuxerforMIMEType();
+    // Recreate our input buffer. We can't directly assign the initData buffer
+    // to mInputBuffer as it will get modified in the Segment Parser Loop.
+    mInputBuffer = Some(MediaSpan::WithCopyOf(mInitData));
+    RecreateParser(true);
+  } else {
+    RecreateParser(false);
+  }
+}
+
+// Pick the eviction budget matching the dominant track type: video buffers
+// get the larger (video) threshold, audio-only buffers the smaller one.
+int64_t TrackBuffersManager::EvictionThreshold() const {
+  return HasVideo() ? mVideoEvictionThreshold : mAudioEvictionThreshold;
+}
+
+// Task-queue side of EvictData(): try to free ~aSizeToEvict bytes. Step 1
+// evicts already-played data up to the last keyframe before the playback
+// position; if that is not enough, Step 2 evicts trailing data beyond the
+// buffered segment currently being played.
+void TrackBuffersManager::DoEvictData(const TimeUnit& aPlaybackTime,
+                                      int64_t aSizeToEvict) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  AUTO_PROFILER_LABEL("TrackBuffersManager::DoEvictData", MEDIA_PLAYBACK);
+
+  mEvictionState = EvictionState::EVICTION_COMPLETED;
+
+  // Video is what takes the most space, only evict there if we have video.
+  auto& track = HasVideo() ? mVideoTracks : mAudioTracks;
+  const auto& buffer = track.GetTrackBuffer();
+  if (buffer.IsEmpty()) {
+    // Buffer has been emptied while the eviction was queued, nothing to do.
+    return;
+  }
+  if (track.mBufferedRanges.IsEmpty()) {
+    MSE_DEBUG(
+        "DoEvictData running with no buffered ranges. 0 duration data likely "
+        "present in our buffer(s). Evicting all data!");
+    // We have no buffered ranges, but may still have data. This happens if the
+    // buffer is full of 0 duration data. Normal removal procedures don't clear
+    // 0 duration data, so blow away all our data.
+    RemoveAllCodedFrames();
+    return;
+  }
+  // Remove any data we've already played, or before the next sample to be
+  // demuxed whichever is lowest.
+  TimeUnit lowerLimit = std::min(track.mNextSampleTime, aPlaybackTime);
+  uint32_t lastKeyFrameIndex = 0;
+  int64_t toEvict = aSizeToEvict;
+  int64_t partialEvict = 0;
+  // Walk forward accumulating sizes per-GOP; stop at the first frame ending
+  // at/after lowerLimit or once we have accounted for enough bytes.
+  for (uint32_t i = 0; i < buffer.Length(); i++) {
+    const auto& frame = buffer[i];
+    if (frame->mKeyframe) {
+      lastKeyFrameIndex = i;
+      toEvict -= partialEvict;
+      if (toEvict < 0) {
+        break;
+      }
+      partialEvict = 0;
+    }
+    if (frame->GetEndTime() >= lowerLimit) {
+      break;
+    }
+    partialEvict += frame->ComputedSizeOfIncludingThis();
+  }
+
+  const int64_t finalSize = mSizeSourceBuffer - aSizeToEvict;
+
+  if (lastKeyFrameIndex > 0) {
+    MSE_DEBUG("Step1. Evicting %" PRId64 " bytes prior currentTime",
+              aSizeToEvict - toEvict);
+    TimeUnit start = track.mBufferedRanges[0].mStart;
+    TimeUnit end =
+        buffer[lastKeyFrameIndex]->mTime - TimeUnit::FromMicroseconds(1);
+    if (end > start) {
+      CodedFrameRemoval(TimeInterval(start, end));
+    }
+  }
+
+  if (mSizeSourceBuffer <= finalSize) {
+    // Step 1 freed enough space.
+    return;
+  }
+
+  toEvict = mSizeSourceBuffer - finalSize;
+
+  // See if we can evict data into the future.
+  // We do not evict data from the currently used buffered interval.
+
+  TimeUnit currentPosition = std::max(aPlaybackTime, track.mNextSampleTime);
+  TimeIntervals futureBuffered(
+      TimeInterval(currentPosition, TimeUnit::FromInfinity()));
+  futureBuffered.Intersection(track.mBufferedRanges);
+  futureBuffered.SetFuzz(MediaSourceDemuxer::EOS_FUZZ / 2);
+  if (futureBuffered.Length() <= 1) {
+    // We have one continuous segment ahead of us:
+    // nothing further can be evicted.
+    return;
+  }
+
+  // Don't evict before the end of the current segment
+  TimeUnit upperLimit = futureBuffered[0].mEnd;
+  uint32_t evictedFramesStartIndex = buffer.Length();
+  for (int32_t i = buffer.Length() - 1; i >= 0; i--) {
+    const auto& frame = buffer[i];
+    if (frame->mTime <= upperLimit || toEvict < 0) {
+      // We've reached a frame that shouldn't be evicted -> Evict after it ->
+      // i+1. Or the previous loop reached the eviction threshold -> Evict from
+      // it -> i+1.
+      evictedFramesStartIndex = i + 1;
+      break;
+    }
+    toEvict -= frame->ComputedSizeOfIncludingThis();
+  }
+  if (evictedFramesStartIndex < buffer.Length()) {
+    MSE_DEBUG("Step2. Evicting %" PRId64 " bytes from trailing data",
+              mSizeSourceBuffer - finalSize - toEvict);
+    CodedFrameRemoval(TimeInterval(buffer[evictedFramesStartIndex]->mTime,
+                                   TimeUnit::FromInfinity()));
+  }
+}
+
+// Task-queue side of RangeRemoval(): queue a RangeRemovalTask and return the
+// promise it will resolve once CodedFrameRemoval() has run for aInterval.
+RefPtr<TrackBuffersManager::RangeRemovalPromise>
+TrackBuffersManager::CodedFrameRemovalWithPromise(TimeInterval aInterval) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+
+  RefPtr<RangeRemovalTask> task = new RangeRemovalTask(aInterval);
+  RefPtr<RangeRemovalPromise> p = task->mPromise.Ensure(__func__);
+  QueueTask(task);
+
+  return p;
+}
+
+// Implements the MSE "Coded Frame Removal" algorithm over aInterval.
+// Returns true if any frames were removed, so RangeRemovalTask's promise can
+// report whether data was actually evicted. Runs on the task queue.
+bool TrackBuffersManager::CodedFrameRemoval(TimeInterval aInterval) {
+  MOZ_ASSERT(OnTaskQueue());
+  AUTO_PROFILER_LABEL("TrackBuffersManager::CodedFrameRemoval", MEDIA_PLAYBACK);
+  MSE_DEBUG("From %.2fs to %.2f", aInterval.mStart.ToSeconds(),
+            aInterval.mEnd.ToSeconds());
+
+// Fix: use #ifdef, consistent with the other DEBUG-only block in this file;
+// "#if DEBUG" is silently false when DEBUG is defined without a value.
+#ifdef DEBUG
+  if (HasVideo()) {
+    MSE_DEBUG("before video ranges=%s",
+              DumpTimeRangesRaw(mVideoTracks.mBufferedRanges).get());
+  }
+  if (HasAudio()) {
+    MSE_DEBUG("before audio ranges=%s",
+              DumpTimeRangesRaw(mAudioTracks.mBufferedRanges).get());
+  }
+#endif
+
+  // 1. Let start be the starting presentation timestamp for the removal range.
+  TimeUnit start = aInterval.mStart;
+  // 2. Let end be the end presentation timestamp for the removal range.
+  TimeUnit end = aInterval.mEnd;
+
+  bool dataRemoved = false;
+
+  // 3. For each track buffer in this source buffer, run the following steps:
+  for (auto track : GetTracksList()) {
+    MSE_DEBUGV("Processing %s track", track->mInfo->mMimeType.get());
+    // 1. Let remove end timestamp be the current value of duration
+    // See bug: https://www.w3.org/Bugs/Public/show_bug.cgi?id=28727
+    // At worse we will remove all frames until the end, unless a key frame is
+    // found between the current interval's end and the trackbuffer's end.
+    TimeUnit removeEndTimestamp = track->mBufferedRanges.GetEnd();
+
+    if (start > removeEndTimestamp) {
+      // Nothing to remove.
+      continue;
+    }
+
+    // 2. If this track buffer has a random access point timestamp that is
+    // greater than or equal to end, then update remove end timestamp to that
+    // random access point timestamp.
+    if (end < track->mBufferedRanges.GetEnd()) {
+      for (auto& frame : track->GetTrackBuffer()) {
+        if (frame->mKeyframe && frame->mTime >= end) {
+          removeEndTimestamp = frame->mTime;
+          break;
+        }
+      }
+    }
+
+    // 3. Remove all media data, from this track buffer, that contain starting
+    // timestamps greater than or equal to start and less than the remove end
+    // timestamp.
+    // 4. Remove decoding dependencies of the coded frames removed in the
+    // previous step: Remove all coded frames between the coded frames removed
+    // in the previous step and the next random access point after those removed
+    // frames.
+    TimeIntervals removedInterval{TimeInterval(start, removeEndTimestamp)};
+    // Fix: the result of RemoveFrames was previously discarded, leaving
+    // dataRemoved permanently false and the RangeRemoval promise always
+    // resolving to "nothing removed".
+    if (RemoveFrames(removedInterval, *track, 0, RemovalMode::kRemoveFrame)) {
+      dataRemoved = true;
+    }
+
+    // 5. If this object is in activeSourceBuffers, the current playback
+    // position is greater than or equal to start and less than the remove end
+    // timestamp, and HTMLMediaElement.readyState is greater than HAVE_METADATA,
+    // then set the HTMLMediaElement.readyState attribute to HAVE_METADATA and
+    // stall playback. This will be done by the MDSM during playback.
+    // TODO properly, so it works even if paused.
+  }
+
+  UpdateBufferedRanges();
+
+  // Update our reported total size.
+  mSizeSourceBuffer = mVideoTracks.mSizeBuffer + mAudioTracks.mSizeBuffer;
+
+  // 4. If buffer full flag equals true and this object is ready to accept more
+  // bytes, then set the buffer full flag to false.
+  if (mBufferFull && mSizeSourceBuffer < EvictionThreshold()) {
+    mBufferFull = false;
+  }
+
+  return dataRemoved;
+}
+
+// Remove every frame from every track buffer, including zero-duration frames
+// that the normal (to-spec) removal algorithm would leave behind.
+void TrackBuffersManager::RemoveAllCodedFrames() {
+  // This is similar to RemoveCodedFrames, but will attempt to remove ALL
+  // the frames. This is not to spec, as explained below at step 3.1. Steps
+  // below coincide with Remove Coded Frames algorithm from the spec.
+  MSE_DEBUG("RemoveAllCodedFrames called.");
+  MOZ_ASSERT(OnTaskQueue());
+  AUTO_PROFILER_LABEL("TrackBuffersManager::RemoveAllCodedFrames",
+                      MEDIA_PLAYBACK);
+
+  // 1. Let start be the starting presentation timestamp for the removal range.
+  TimeUnit start{};
+  // 2. Let end be the end presentation timestamp for the removal range.
+  TimeUnit end = TimeUnit::FromMicroseconds(1);
+  // Find an end time such that our range will include every frame in every
+  // track. We do this by setting the end of our interval to the largest end
+  // time seen + 1 microsecond.
+  for (TrackData* track : GetTracksList()) {
+    for (auto& frame : track->GetTrackBuffer()) {
+      MOZ_ASSERT(frame->mTime >= start,
+                 "Shouldn't have frame at negative time!");
+      TimeUnit frameEnd = frame->mTime + frame->mDuration;
+      if (frameEnd > end) {
+        end = frameEnd + TimeUnit::FromMicroseconds(1);
+      }
+    }
+  }
+
+  // 3. For each track buffer in this source buffer, run the following steps:
+  TimeIntervals removedInterval{TimeInterval(start, end)};
+  for (TrackData* track : GetTracksList()) {
+    // 1. Let remove end timestamp be the current value of duration
+    // ^ It's off spec, but we ignore this in order to clear 0 duration frames.
+    // If we don't ignore this rule and our buffer is full of 0 duration frames
+    // at timestamp n, we get an eviction range of [0, n). When we get to step
+    // 3.3 below, the 0 duration frames will not be evicted because their
+    // timestamp is not less than remove end timestamp -- it will in fact be
+    // equal to remove end timestamp.
+    //
+    // 2. If this track buffer has a random access point timestamp that is
+    // greater than or equal to end, then update remove end timestamp to that
+    // random access point timestamp.
+    // ^ We've made sure end > any sample's timestamp, so can skip this.
+    //
+    // 3. Remove all media data, from this track buffer, that contain starting
+    // timestamps greater than or equal to start and less than the remove end
+    // timestamp.
+    // 4. Remove decoding dependencies of the coded frames removed in the
+    // previous step: Remove all coded frames between the coded frames removed
+    // in the previous step and the next random access point after those removed
+    // frames.
+
+    // This should remove every frame in the track because removedInterval was
+    // constructed such that every frame in any track falls into that interval.
+    RemoveFrames(removedInterval, *track, 0, RemovalMode::kRemoveFrame);
+
+    // 5. If this object is in activeSourceBuffers, the current playback
+    // position is greater than or equal to start and less than the remove end
+    // timestamp, and HTMLMediaElement.readyState is greater than HAVE_METADATA,
+    // then set the HTMLMediaElement.readyState attribute to HAVE_METADATA and
+    // stall playback. This will be done by the MDSM during playback.
+    // TODO properly, so it works even if paused.
+  }
+
+  UpdateBufferedRanges();
+#ifdef DEBUG
+  {
+    MutexAutoLock lock(mMutex);
+    // Fix: this assertion message previously said "video" ranges (copy-paste
+    // from the check below) while asserting the audio ranges.
+    MOZ_ASSERT(
+        mAudioBufferedRanges.IsEmpty(),
+        "Should have no buffered audio ranges after evicting everything.");
+    MOZ_ASSERT(
+        mVideoBufferedRanges.IsEmpty(),
+        "Should have no buffered video ranges after evicting everything.");
+  }
+#endif
+  mSizeSourceBuffer = mVideoTracks.mSizeBuffer + mAudioTracks.mSizeBuffer;
+  MOZ_ASSERT(mSizeSourceBuffer == 0,
+             "Buffer should be empty after evicting everything!");
+  if (mBufferFull && mSizeSourceBuffer < EvictionThreshold()) {
+    mBufferFull = false;
+  }
+}
+
+// Publish each track's sanitized buffered ranges under mMutex so Buffered()
+// can read a consistent snapshot from any thread.
+void TrackBuffersManager::UpdateBufferedRanges() {
+  MutexAutoLock mut(mMutex);
+
+  mVideoBufferedRanges = mVideoTracks.mSanitizedBufferedRanges;
+  mAudioBufferedRanges = mAudioTracks.mSanitizedBufferedRanges;
+
+// Fix: use #ifdef, consistent with the other DEBUG-only block in this file;
+// "#if DEBUG" is silently false when DEBUG is defined without a value.
+#ifdef DEBUG
+  if (HasVideo()) {
+    MSE_DEBUG("after video ranges=%s",
+              DumpTimeRangesRaw(mVideoTracks.mBufferedRanges).get());
+  }
+  if (HasAudio()) {
+    MSE_DEBUG("after audio ranges=%s",
+              DumpTimeRangesRaw(mAudioTracks.mBufferedRanges).get());
+  }
+#endif
+}
+
+void TrackBuffersManager::SegmentParserLoop() {
+ MOZ_ASSERT(OnTaskQueue());
+ AUTO_PROFILER_LABEL("TrackBuffersManager::SegmentParserLoop", MEDIA_PLAYBACK);
+
+ while (true) {
+ // 1. If the input buffer is empty, then jump to the need more data step
+ // below.
+ if (!mInputBuffer || mInputBuffer->IsEmpty()) {
+ NeedMoreData();
+ return;
+ }
+ // 2. If the input buffer contains bytes that violate the SourceBuffer
+ // byte stream format specification, then run the append error algorithm
+ // with the decode error parameter set to true and abort this algorithm.
+ // TODO
+
+ // 3. Remove any bytes that the byte stream format specifications say must
+ // be ignored from the start of the input buffer. We do not remove bytes
+ // from our input buffer. Instead we enforce that our ContainerParser is
+ // able to skip over all data that is supposed to be ignored.
+
+ // 4. If the append state equals WAITING_FOR_SEGMENT, then run the following
+ // steps:
+ if (mSourceBufferAttributes->GetAppendState() ==
+ AppendState::WAITING_FOR_SEGMENT) {
+ MediaResult haveInitSegment =
+ mParser->IsInitSegmentPresent(*mInputBuffer);
+ if (NS_SUCCEEDED(haveInitSegment)) {
+ SetAppendState(AppendState::PARSING_INIT_SEGMENT);
+ if (mFirstInitializationSegmentReceived && !mChangeTypeReceived) {
+ // This is a new initialization segment. Obsolete the old one.
+ RecreateParser(false);
+ }
+ continue;
+ }
+ MediaResult haveMediaSegment =
+ mParser->IsMediaSegmentPresent(*mInputBuffer);
+ if (NS_SUCCEEDED(haveMediaSegment)) {
+ SetAppendState(AppendState::PARSING_MEDIA_SEGMENT);
+ mNewMediaSegmentStarted = true;
+ continue;
+ }
+ // We have neither an init segment nor a media segment.
+ // Check if it was invalid data.
+ if (haveInitSegment != NS_ERROR_NOT_AVAILABLE) {
+ MSE_DEBUG("Found invalid data.");
+ RejectAppend(haveInitSegment, __func__);
+ return;
+ }
+ if (haveMediaSegment != NS_ERROR_NOT_AVAILABLE) {
+ MSE_DEBUG("Found invalid data.");
+ RejectAppend(haveMediaSegment, __func__);
+ return;
+ }
+ MSE_DEBUG("Found incomplete data.");
+ NeedMoreData();
+ return;
+ }
+
+ MOZ_ASSERT(mSourceBufferAttributes->GetAppendState() ==
+ AppendState::PARSING_INIT_SEGMENT ||
+ mSourceBufferAttributes->GetAppendState() ==
+ AppendState::PARSING_MEDIA_SEGMENT);
+
+ TimeUnit start, end;
+ MediaResult newData = NS_ERROR_NOT_AVAILABLE;
+
+ if (mSourceBufferAttributes->GetAppendState() ==
+ AppendState::PARSING_INIT_SEGMENT ||
+ (mSourceBufferAttributes->GetAppendState() ==
+ AppendState::PARSING_MEDIA_SEGMENT &&
+ mFirstInitializationSegmentReceived && !mChangeTypeReceived)) {
+ newData = mParser->ParseStartAndEndTimestamps(*mInputBuffer, start, end);
+ if (NS_FAILED(newData) && newData.Code() != NS_ERROR_NOT_AVAILABLE) {
+ RejectAppend(newData, __func__);
+ return;
+ }
+ mProcessedInput += mInputBuffer->Length();
+ }
+
+ // 5. If the append state equals PARSING_INIT_SEGMENT, then run the
+ // following steps:
+ if (mSourceBufferAttributes->GetAppendState() ==
+ AppendState::PARSING_INIT_SEGMENT) {
+ if (mParser->InitSegmentRange().IsEmpty()) {
+ mInputBuffer.reset();
+ NeedMoreData();
+ return;
+ }
+ InitializationSegmentReceived();
+ return;
+ }
+ if (mSourceBufferAttributes->GetAppendState() ==
+ AppendState::PARSING_MEDIA_SEGMENT) {
+ // 1. If the first initialization segment received flag is false, then run
+ // the append error algorithm with the decode error parameter set to
+ // true and abort this algorithm.
+ // Or we are in the process of changeType, in which case we must first
+ // get an init segment before getting a media segment.
+ if (!mFirstInitializationSegmentReceived || mChangeTypeReceived) {
+ RejectAppend(NS_ERROR_FAILURE, __func__);
+ return;
+ }
+
+ // We can't feed some demuxers (WebMDemuxer) with data that do not have
+ // monotonizally increasing timestamps. So we check if we have a
+ // discontinuity from the previous segment parsed.
+ // If so, recreate a new demuxer to ensure that the demuxer is only fed
+ // monotonically increasing data.
+ if (mNewMediaSegmentStarted) {
+ if (NS_SUCCEEDED(newData) && mLastParsedEndTime.isSome() &&
+ start < mLastParsedEndTime.ref()) {
+ MSE_DEBUG("Re-creating demuxer");
+ ResetDemuxingState();
+ return;
+ }
+ if (NS_SUCCEEDED(newData) || !mParser->MediaSegmentRange().IsEmpty()) {
+ if (mPendingInputBuffer) {
+ // We now have a complete media segment header. We can resume
+ // parsing the data.
+ AppendDataToCurrentInputBuffer(*mPendingInputBuffer);
+ mPendingInputBuffer.reset();
+ }
+ mNewMediaSegmentStarted = false;
+ } else {
+ // We don't have any data to demux yet, stash aside the data.
+ // This also handles the case:
+ // 2. If the input buffer does not contain a complete media segment
+ // header yet, then jump to the need more data step below.
+ if (!mPendingInputBuffer) {
+ mPendingInputBuffer = Some(MediaSpan(*mInputBuffer));
+ } else {
+ // Note we reset mInputBuffer below, so this won't end up appending
+ // the contents of mInputBuffer to itself.
+ mPendingInputBuffer->Append(*mInputBuffer);
+ }
+
+ mInputBuffer.reset();
+ NeedMoreData();
+ return;
+ }
+ }
+
+ // 3. If the input buffer contains one or more complete coded frames, then
+ // run the coded frame processing algorithm.
+ RefPtr<TrackBuffersManager> self = this;
+ CodedFrameProcessing()
+ ->Then(
+ TaskQueueFromTaskQueue(), __func__,
+ [self](bool aNeedMoreData) {
+ self->mTaskQueueCapability->AssertOnCurrentThread();
+ self->mProcessingRequest.Complete();
+ if (aNeedMoreData) {
+ self->NeedMoreData();
+ } else {
+ self->ScheduleSegmentParserLoop();
+ }
+ },
+ [self](const MediaResult& aRejectValue) {
+ self->mTaskQueueCapability->AssertOnCurrentThread();
+ self->mProcessingRequest.Complete();
+ self->RejectAppend(aRejectValue, __func__);
+ })
+ ->Track(mProcessingRequest);
+ return;
+ }
+ }
+}
+
+// Finishes the current AppendBuffer task successfully: parsing cannot make
+// further progress until more data is appended. Resolves the task's promise
+// with the current attributes (so the SourceBuffer can sync its state back),
+// then clears per-task state and runs the next queued task.
+void TrackBuffersManager::NeedMoreData() {
+  MSE_DEBUG("");
+  // Only valid while an AppendBuffer task is the current task.
+  MOZ_DIAGNOSTIC_ASSERT(mCurrentTask &&
+                        mCurrentTask->GetType() ==
+                            SourceBufferTask::Type::AppendBuffer);
+  MOZ_DIAGNOSTIC_ASSERT(mSourceBufferAttributes);
+
+  mCurrentTask->As<AppendBufferTask>()->mPromise.Resolve(
+      SourceBufferTask::AppendBufferResult(mActiveTrack,
+                                           *mSourceBufferAttributes),
+      __func__);
+  // This task is complete; drop the per-task state before moving on.
+  mSourceBufferAttributes = nullptr;
+  mCurrentTask = nullptr;
+  ProcessTasks();
+}
+
+// Fails the current AppendBuffer task with aRejectValue, clears per-task
+// state and resumes processing of any queued tasks. aName identifies the
+// call site for promise diagnostics.
+void TrackBuffersManager::RejectAppend(const MediaResult& aRejectValue,
+                                       const char* aName) {
+  MSE_DEBUG("rv=%" PRIu32, static_cast<uint32_t>(aRejectValue.Code()));
+  // Only valid while an AppendBuffer task is the current task.
+  MOZ_DIAGNOSTIC_ASSERT(mCurrentTask &&
+                        mCurrentTask->GetType() ==
+                            SourceBufferTask::Type::AppendBuffer);
+
+  mCurrentTask->As<AppendBufferTask>()->mPromise.Reject(aRejectValue, __func__);
+  // This task is done (failed); drop the per-task state before moving on.
+  mSourceBufferAttributes = nullptr;
+  mCurrentTask = nullptr;
+  ProcessTasks();
+}
+
+// Re-enters SegmentParserLoop asynchronously by posting it back onto our own
+// task queue, so the current stack unwinds before the next parser iteration.
+void TrackBuffersManager::ScheduleSegmentParserLoop() {
+  MOZ_ASSERT(OnTaskQueue());
+  nsCOMPtr<nsIRunnable> task =
+      NewRunnableMethod("TrackBuffersManager::SegmentParserLoop", this,
+                        &TrackBuffersManager::SegmentParserLoop);
+  TaskQueueFromTaskQueue()->Dispatch(task.forget());
+}
+
+// Tears down the track demuxers (video first, then audio) and the input
+// demuxer, and forgets the last parsed end time.
+void TrackBuffersManager::ShutdownDemuxers() {
+  // Break reference cycles held by each per-track demuxer before dropping it.
+  for (auto* tracks : {&mVideoTracks, &mAudioTracks}) {
+    if (tracks->mDemuxer) {
+      tracks->mDemuxer->BreakCycles();
+      tracks->mDemuxer = nullptr;
+    }
+  }
+  // We shouldn't change mInputDemuxer while a demuxer init/reset request is
+  // being processed. See bug 1239983.
+  MOZ_DIAGNOSTIC_ASSERT(!mDemuxerInitRequest.Exists());
+  mInputDemuxer = nullptr;
+  mLastParsedEndTime.reset();
+}
+
+// (Re)creates mInputDemuxer to match the SourceBuffer's MIME type.
+// Any previous demuxers are shut down first. On an unsupported type,
+// mInputDemuxer is left null and a warning is emitted; callers check
+// mInputDemuxer afterwards.
+void TrackBuffersManager::CreateDemuxerforMIMEType() {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  MSE_DEBUG("mType.OriginalString=%s", mType.OriginalString().get());
+  ShutdownDemuxers();
+
+  if (mType.Type() == MEDIAMIMETYPE(VIDEO_WEBM) ||
+      mType.Type() == MEDIAMIMETYPE(AUDIO_WEBM)) {
+    mInputDemuxer =
+        new WebMDemuxer(mCurrentInputBuffer, true /* IsMediaSource*/);
+    DDLINKCHILD("demuxer", mInputDemuxer.get());
+    return;
+  }
+
+#ifdef MOZ_FMP4
+  // MP4 support is compile-time conditional.
+  if (mType.Type() == MEDIAMIMETYPE(VIDEO_MP4) ||
+      mType.Type() == MEDIAMIMETYPE(AUDIO_MP4)) {
+    mInputDemuxer = new MP4Demuxer(mCurrentInputBuffer);
+    DDLINKCHILD("demuxer", mInputDemuxer.get());
+    return;
+  }
+#endif
+  NS_WARNING("Not supported (yet)");
+}
+
+// We reset the demuxer by creating a new one and initializing it.
+// The new demuxer is seeded with the previously stored init segment, then
+// initialized asynchronously; completion is handled by OnDemuxerResetDone
+// (or OnDemuxerInitFailed), tracked via mDemuxerInitRequest.
+void TrackBuffersManager::ResetDemuxingState() {
+  MOZ_ASSERT(OnTaskQueue());
+  // We can only reset if we already have an init segment to re-feed.
+  MOZ_ASSERT(mParser && mParser->HasInitData());
+  AUTO_PROFILER_LABEL("TrackBuffersManager::ResetDemuxingState",
+                      MEDIA_PLAYBACK);
+  RecreateParser(true);
+  mCurrentInputBuffer = new SourceBufferResource();
+  // The demuxer isn't initialized yet ; we don't want to notify it
+  // that data has been appended yet ; so we simply append the init segment
+  // to the resource.
+  mCurrentInputBuffer->AppendData(mParser->InitData());
+  CreateDemuxerforMIMEType();
+  if (!mInputDemuxer) {
+    RejectAppend(NS_ERROR_FAILURE, __func__);
+    return;
+  }
+  mInputDemuxer->Init()
+      ->Then(TaskQueueFromTaskQueue(), __func__, this,
+             &TrackBuffersManager::OnDemuxerResetDone,
+             &TrackBuffersManager::OnDemuxerInitFailed)
+      ->Track(mDemuxerInitRequest);
+}
+
+// Called when the demuxer created by ResetDemuxingState finishes (re-)init.
+// Recreates the per-track demuxers, re-parses any stashed partial media
+// segment header, and resumes the segment parser loop synchronously.
+// A non-OK (warning) aResult is forwarded to the decoder owner on the main
+// thread, or treated as fatal if warnings-as-errors is prefed on.
+void TrackBuffersManager::OnDemuxerResetDone(const MediaResult& aResult) {
+  MOZ_ASSERT(OnTaskQueue());
+  mDemuxerInitRequest.Complete();
+
+  // Optionally escalate demuxer warnings to hard append failures.
+  if (NS_FAILED(aResult) && StaticPrefs::media_playback_warnings_as_errors()) {
+    RejectAppend(aResult, __func__);
+    return;
+  }
+
+  // mInputDemuxer shouldn't have been destroyed while a demuxer init/reset
+  // request was being processed. See bug 1239983.
+  MOZ_DIAGNOSTIC_ASSERT(mInputDemuxer);
+
+  if (aResult != NS_OK && mParentDecoder) {
+    // Surface the (non-fatal) warning to the media element on the main thread.
+    RefPtr<TrackBuffersManager> self = this;
+    mAbstractMainThread->Dispatch(NS_NewRunnableFunction(
+        "TrackBuffersManager::OnDemuxerResetDone", [self, aResult]() {
+          if (self->mParentDecoder && self->mParentDecoder->GetOwner()) {
+            self->mParentDecoder->GetOwner()->DecodeWarning(aResult);
+          }
+        }));
+  }
+
+  // Recreate track demuxers.
+  uint32_t numVideos = mInputDemuxer->GetNumberTracks(TrackInfo::kVideoTrack);
+  if (numVideos) {
+    // We currently only handle the first video track.
+    mVideoTracks.mDemuxer =
+        mInputDemuxer->GetTrackDemuxer(TrackInfo::kVideoTrack, 0);
+    MOZ_ASSERT(mVideoTracks.mDemuxer);
+    DDLINKCHILD("video demuxer", mVideoTracks.mDemuxer.get());
+  }
+
+  uint32_t numAudios = mInputDemuxer->GetNumberTracks(TrackInfo::kAudioTrack);
+  if (numAudios) {
+    // We currently only handle the first audio track.
+    mAudioTracks.mDemuxer =
+        mInputDemuxer->GetTrackDemuxer(TrackInfo::kAudioTrack, 0);
+    MOZ_ASSERT(mAudioTracks.mDemuxer);
+    DDLINKCHILD("audio demuxer", mAudioTracks.mDemuxer.get());
+  }
+
+  if (mPendingInputBuffer) {
+    // We had a partial media segment header stashed aside.
+    // Reparse its content so we can continue parsing the current input buffer.
+    TimeUnit start, end;
+    mParser->ParseStartAndEndTimestamps(*mPendingInputBuffer, start, end);
+    mProcessedInput += mPendingInputBuffer->Length();
+  }
+
+  SegmentParserLoop();
+}
+
+// Appends aData to the demuxer's backing resource and then notifies the
+// demuxer that new data is available. The notify must come after the append
+// so the demuxer sees the bytes when it reacts.
+void TrackBuffersManager::AppendDataToCurrentInputBuffer(
+    const MediaSpan& aData) {
+  MOZ_ASSERT(mCurrentInputBuffer);
+  mCurrentInputBuffer->AppendData(aData);
+  mInputDemuxer->NotifyDataArrived();
+}
+
+// Handles a complete initialization segment found by the parser: validates
+// the byte bookkeeping, transfers the init data into a fresh
+// SourceBufferResource, strips the init segment bytes from mInputBuffer,
+// creates a demuxer for the MIME type and kicks off its async Init(),
+// continuing in OnDemuxerInitDone / OnDemuxerInitFailed.
+void TrackBuffersManager::InitializationSegmentReceived() {
+  MOZ_ASSERT(OnTaskQueue());
+  MOZ_ASSERT(mParser->HasCompleteInitData());
+  AUTO_PROFILER_LABEL("TrackBuffersManager::InitializationSegmentReceived",
+                      MEDIA_PLAYBACK);
+
+  // mProcessedInput - mInputBuffer->Length() is the stream offset of the
+  // start of mInputBuffer; the init segment must end at or after it.
+  int64_t endInit = mParser->InitSegmentRange().mEnd;
+  if (mInputBuffer->Length() > mProcessedInput ||
+      int64_t(mProcessedInput - mInputBuffer->Length()) > endInit) {
+    // Something is not quite right with the data appended. Refuse it.
+    RejectAppend(MediaResult(NS_ERROR_FAILURE,
+                             "Invalid state following initialization segment"),
+                 __func__);
+    return;
+  }
+
+  mCurrentInputBuffer = new SourceBufferResource();
+  // The demuxer isn't initialized yet ; we don't want to notify it
+  // that data has been appended yet ; so we simply append the init segment
+  // to the resource.
+  mCurrentInputBuffer->AppendData(mParser->InitData());
+  // Number of bytes of the init segment still sitting in mInputBuffer.
+  uint32_t length = endInit - (mProcessedInput - mInputBuffer->Length());
+  MOZ_RELEASE_ASSERT(length <= mInputBuffer->Length());
+  mInputBuffer->RemoveFront(length);
+  CreateDemuxerforMIMEType();
+  if (!mInputDemuxer) {
+    NS_WARNING("TODO type not supported");
+    RejectAppend(NS_ERROR_DOM_NOT_SUPPORTED_ERR, __func__);
+    return;
+  }
+  mInputDemuxer->Init()
+      ->Then(TaskQueueFromTaskQueue(), __func__, this,
+             &TrackBuffersManager::OnDemuxerInitDone,
+             &TrackBuffersManager::OnDemuxerInitFailed)
+      ->Track(mDemuxerInitRequest);
+}
+
+// Returns true if the init segment just parsed is logically the same as the
+// previous one: either byte-identical init data, or (failing that) matching
+// per-track info for every track type present in aNewMediaInfo. Always false
+// when there is no previous init data or a changeType() is in flight.
+bool TrackBuffersManager::IsRepeatInitData(
+    const MediaInfo& aNewMediaInfo) const {
+  MOZ_ASSERT(OnTaskQueue());
+  if (!mInitData) {
+    // There is no previous init data, so this cannot be a repeat.
+    return false;
+  }
+
+  if (mChangeTypeReceived) {
+    // If we're received change type we want to reprocess init data.
+    return false;
+  }
+
+  MOZ_DIAGNOSTIC_ASSERT(mInitData, "Init data should be non-null");
+  if (*mInitData == *mParser->InitData()) {
+    // We have previous init data, and it's the same binary data as we've just
+    // parsed.
+    return true;
+  }
+
+  // At this point the binary data doesn't match, but it's possible to have the
+  // different binary representations for the same logical init data. These
+  // checks can be revised as we encounter such cases in the wild.
+
+  bool audioInfoIsRepeat = false;
+  if (aNewMediaInfo.HasAudio()) {
+    if (!mAudioTracks.mLastInfo) {
+      // There is no old audio info, so this can't be a repeat.
+      return false;
+    }
+    audioInfoIsRepeat =
+        *mAudioTracks.mLastInfo->GetAsAudioInfo() == aNewMediaInfo.mAudio;
+    if (!aNewMediaInfo.HasVideo()) {
+      // Only have audio.
+      return audioInfoIsRepeat;
+    }
+  }
+
+  bool videoInfoIsRepeat = false;
+  if (aNewMediaInfo.HasVideo()) {
+    if (!mVideoTracks.mLastInfo) {
+      // There is no old video info, so this can't be a repeat.
+      return false;
+    }
+    videoInfoIsRepeat =
+        *mVideoTracks.mLastInfo->GetAsVideoInfo() == aNewMediaInfo.mVideo;
+    if (!aNewMediaInfo.HasAudio()) {
+      // Only have video.
+      return videoInfoIsRepeat;
+    }
+  }
+
+  // Reaching here means both audio and video are present in aNewMediaInfo.
+  if (audioInfoIsRepeat && videoInfoIsRepeat) {
+    MOZ_DIAGNOSTIC_ASSERT(
+        aNewMediaInfo.HasVideo() && aNewMediaInfo.HasAudio(),
+        "This should only be reachable if audio and video are present");
+    // Video + audio are present and both have the same init data.
+    return true;
+  }
+
+  return false;
+}
+
+// Completion handler for the demuxer Init() started in
+// InitializationSegmentReceived. Implements the MSE "initialization segment
+// received" algorithm: extracts track info, updates the duration, validates
+// track counts against the first init segment, detects repeated init data
+// (to avoid needless decoder re-creation, bug 1450952), publishes the new
+// MediaInfo, then returns the append state to WAITING_FOR_SEGMENT and
+// re-schedules the segment parser loop.
+void TrackBuffersManager::OnDemuxerInitDone(const MediaResult& aResult) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  MOZ_DIAGNOSTIC_ASSERT(mInputDemuxer, "mInputDemuxer has been destroyed");
+  AUTO_PROFILER_LABEL("TrackBuffersManager::OnDemuxerInitDone", MEDIA_PLAYBACK);
+
+  mDemuxerInitRequest.Complete();
+
+  // Optionally escalate demuxer warnings to hard append failures.
+  if (NS_FAILED(aResult) && StaticPrefs::media_playback_warnings_as_errors()) {
+    RejectAppend(aResult, __func__);
+    return;
+  }
+
+  MediaInfo info;
+
+  uint32_t numVideos = mInputDemuxer->GetNumberTracks(TrackInfo::kVideoTrack);
+  if (numVideos) {
+    // We currently only handle the first video track.
+    mVideoTracks.mDemuxer =
+        mInputDemuxer->GetTrackDemuxer(TrackInfo::kVideoTrack, 0);
+    MOZ_ASSERT(mVideoTracks.mDemuxer);
+    DDLINKCHILD("video demuxer", mVideoTracks.mDemuxer.get());
+    info.mVideo = *mVideoTracks.mDemuxer->GetInfo()->GetAsVideoInfo();
+    info.mVideo.mTrackId = 2;
+  }
+
+  uint32_t numAudios = mInputDemuxer->GetNumberTracks(TrackInfo::kAudioTrack);
+  if (numAudios) {
+    // We currently only handle the first audio track.
+    mAudioTracks.mDemuxer =
+        mInputDemuxer->GetTrackDemuxer(TrackInfo::kAudioTrack, 0);
+    MOZ_ASSERT(mAudioTracks.mDemuxer);
+    DDLINKCHILD("audio demuxer", mAudioTracks.mDemuxer.get());
+    info.mAudio = *mAudioTracks.mDemuxer->GetInfo()->GetAsAudioInfo();
+    info.mAudio.mTrackId = 1;
+  }
+
+  TimeUnit videoDuration = numVideos ? info.mVideo.mDuration : TimeUnit::Zero();
+  TimeUnit audioDuration = numAudios ? info.mAudio.mDuration : TimeUnit::Zero();
+
+  TimeUnit duration = std::max(videoDuration, audioDuration);
+  // 1. Update the duration attribute if it currently equals NaN.
+  // Those steps are performed by the MediaSourceDecoder::SetInitialDuration
+  mAbstractMainThread->Dispatch(NewRunnableMethod<TimeUnit>(
+      "MediaSourceDecoder::SetInitialDuration", mParentDecoder.get(),
+      &MediaSourceDecoder::SetInitialDuration,
+      !duration.IsZero() ? duration : TimeUnit::FromInfinity()));
+
+  // 2. If the initialization segment has no audio, video, or text tracks, then
+  // run the append error algorithm with the decode error parameter set to true
+  // and abort these steps.
+  if (!numVideos && !numAudios) {
+    RejectAppend(NS_ERROR_FAILURE, __func__);
+    return;
+  }
+
+  // 3. If the first initialization segment received flag is true, then run the
+  // following steps:
+  if (mFirstInitializationSegmentReceived) {
+    // The set of tracks must match the first init segment's.
+    if (numVideos != mVideoTracks.mNumTracks ||
+        numAudios != mAudioTracks.mNumTracks) {
+      RejectAppend(NS_ERROR_FAILURE, __func__);
+      return;
+    }
+    // 1. If more than one track for a single type are present (ie 2 audio
+    // tracks), then the Track IDs match the ones in the first initialization
+    // segment.
+    // TODO
+    // 2. Add the appropriate track descriptions from this initialization
+    // segment to each of the track buffers.
+    // TODO
+    // 3. Set the need random access point flag on all track buffers to true.
+    mVideoTracks.mNeedRandomAccessPoint = true;
+    mAudioTracks.mNeedRandomAccessPoint = true;
+  }
+
+  // Check if we've received the same init data again. Some streams will
+  // resend the same data. In these cases we don't need to change the stream
+  // id as it's the same stream. Doing so would recreate decoders, possibly
+  // leading to gaps in audio and/or video (see bug 1450952).
+  bool isRepeatInitData = IsRepeatInitData(info);
+
+  MOZ_ASSERT(mFirstInitializationSegmentReceived || !isRepeatInitData,
+             "Should never detect repeat init data for first segment!");
+
+  // If we have new init data we configure and set track info as needed. If we
+  // have repeat init data we carry forward our existing track info.
+  if (!isRepeatInitData) {
+    // Increase our stream id.
+    uint32_t streamID = sStreamSourceID++;
+
+    // 4. Let active track flag equal false.
+    bool activeTrack = false;
+
+    // 5. If the first initialization segment received flag is false, then run
+    // the following steps:
+    if (!mFirstInitializationSegmentReceived) {
+      MSE_DEBUG("Get first init data");
+      mAudioTracks.mNumTracks = numAudios;
+      // TODO:
+      // 1. If the initialization segment contains tracks with codecs the user
+      // agent does not support, then run the append error algorithm with the
+      // decode error parameter set to true and abort these steps.
+
+      // 2. For each audio track in the initialization segment, run following
+      // steps: for (uint32_t i = 0; i < numAudios; i++) {
+      if (numAudios) {
+        // 1. Let audio byte stream track ID be the Track ID for the current
+        // track being processed.
+        // 2. Let audio language be a BCP 47 language tag for the language
+        // specified in the initialization segment for this track or an empty
+        // string if no language info is present.
+        // 3. If audio language equals an empty string or the 'und' BCP 47
+        // value, then run the default track language algorithm with
+        // byteStreamTrackID set to audio byte stream track ID and type set to
+        // "audio" and assign the value returned by the algorithm to audio
+        // language.
+        // 4. Let audio label be a label specified in the initialization segment
+        // for this track or an empty string if no label info is present.
+        // 5. If audio label equals an empty string, then run the default track
+        // label algorithm with byteStreamTrackID set to audio byte stream track
+        // ID and type set to "audio" and assign the value returned by the
+        // algorithm to audio label.
+        // 6. Let audio kinds be an array of kind strings specified in the
+        // initialization segment for this track or an empty array if no kind
+        // information is provided.
+        // 7. If audio kinds equals an empty array, then run the default track
+        // kinds algorithm with byteStreamTrackID set to audio byte stream track
+        // ID and type set to "audio" and assign the value returned by the
+        // algorithm to audio kinds.
+        // 8. For each value in audio kinds, run the following steps:
+        //   1. Let current audio kind equal the value from audio kinds for this
+        //   iteration of the loop.
+        //   2. Let new audio track be a new AudioTrack object.
+        //   3. Generate a unique ID and assign it to the id property on new
+        //   audio track.
+        //   4. Assign audio language to the language property on new audio
+        //   track.
+        //   5. Assign audio label to the label property on new audio track.
+        //   6. Assign current audio kind to the kind property on new audio
+        //   track.
+        //   7. If audioTracks.length equals 0, then run the following steps:
+        //     1. Set the enabled property on new audio track to true.
+        //     2. Set active track flag to true.
+        activeTrack = true;
+        //   8. Add new audio track to the audioTracks attribute on this
+        //   SourceBuffer object.
+        //   9. Queue a task to fire a trusted event named addtrack, that does
+        //   not bubble and is not cancelable, and that uses the TrackEvent
+        //   interface, at the AudioTrackList object referenced by the
+        //   audioTracks attribute on this SourceBuffer object.
+        //   10. Add new audio track to the audioTracks attribute on the
+        //   HTMLMediaElement.
+        //   11. Queue a task to fire a trusted event named addtrack, that does
+        //   not bubble and is not cancelable, and that uses the TrackEvent
+        //   interface, at the AudioTrackList object referenced by the
+        //   audioTracks attribute on the HTMLMediaElement.
+        mAudioTracks.mBuffers.AppendElement(TrackBuffer());
+        // 10. Add the track description for this track to the track buffer.
+        mAudioTracks.mInfo = new TrackInfoSharedPtr(info.mAudio, streamID);
+        mAudioTracks.mLastInfo = mAudioTracks.mInfo;
+      }
+
+      mVideoTracks.mNumTracks = numVideos;
+      // 3. For each video track in the initialization segment, run following
+      // steps: for (uint32_t i = 0; i < numVideos; i++) {
+      if (numVideos) {
+        // 1. Let video byte stream track ID be the Track ID for the current
+        // track being processed.
+        // 2. Let video language be a BCP 47 language tag for the language
+        // specified in the initialization segment for this track or an empty
+        // string if no language info is present.
+        // 3. If video language equals an empty string or the 'und' BCP 47
+        // value, then run the default track language algorithm with
+        // byteStreamTrackID set to video byte stream track ID and type set to
+        // "video" and assign the value returned by the algorithm to video
+        // language.
+        // 4. Let video label be a label specified in the initialization segment
+        // for this track or an empty string if no label info is present.
+        // 5. If video label equals an empty string, then run the default track
+        // label algorithm with byteStreamTrackID set to video byte stream track
+        // ID and type set to "video" and assign the value returned by the
+        // algorithm to video label.
+        // 6. Let video kinds be an array of kind strings specified in the
+        // initialization segment for this track or an empty array if no kind
+        // information is provided.
+        // 7. If video kinds equals an empty array, then run the default track
+        // kinds algorithm with byteStreamTrackID set to video byte stream track
+        // ID and type set to "video" and assign the value returned by the
+        // algorithm to video kinds.
+        // 8. For each value in video kinds, run the following steps:
+        //   1. Let current video kind equal the value from video kinds for this
+        //   iteration of the loop.
+        //   2. Let new video track be a new VideoTrack object.
+        //   3. Generate a unique ID and assign it to the id property on new
+        //   video track.
+        //   4. Assign video language to the language property on new video
+        //   track.
+        //   5. Assign video label to the label property on new video track.
+        //   6. Assign current video kind to the kind property on new video
+        //   track.
+        //   7. If videoTracks.length equals 0, then run the following steps:
+        //     1. Set the selected property on new video track to true.
+        //     2. Set active track flag to true.
+        activeTrack = true;
+        //   8. Add new video track to the videoTracks attribute on this
+        //   SourceBuffer object.
+        //   9. Queue a task to fire a trusted event named addtrack, that does
+        //   not bubble and is not cancelable, and that uses the TrackEvent
+        //   interface, at the VideoTrackList object referenced by the
+        //   videoTracks attribute on this SourceBuffer object.
+        //   10. Add new video track to the videoTracks attribute on the
+        //   HTMLMediaElement.
+        //   11. Queue a task to fire a trusted event named addtrack, that does
+        //   not bubble and is not cancelable, and that uses the TrackEvent
+        //   interface, at the VideoTrackList object referenced by the
+        //   videoTracks attribute on the HTMLMediaElement.
+        mVideoTracks.mBuffers.AppendElement(TrackBuffer());
+        // 10. Add the track description for this track to the track buffer.
+        mVideoTracks.mInfo = new TrackInfoSharedPtr(info.mVideo, streamID);
+        mVideoTracks.mLastInfo = mVideoTracks.mInfo;
+      }
+      // 4. For each text track in the initialization segment, run following
+      // steps:
+      // 5. If active track flag equals true, then run the following steps:
+      // This is handled by SourceBuffer once the promise is resolved.
+      if (activeTrack) {
+        mActiveTrack = true;
+      }
+
+      // 6. Set first initialization segment received flag to true.
+      mFirstInitializationSegmentReceived = true;
+    } else {
+      MSE_DEBUG("Get new init data");
+      mAudioTracks.mLastInfo = new TrackInfoSharedPtr(info.mAudio, streamID);
+      mVideoTracks.mLastInfo = new TrackInfoSharedPtr(info.mVideo, streamID);
+    }
+
+    UniquePtr<EncryptionInfo> crypto = mInputDemuxer->GetCrypto();
+    if (crypto && crypto->IsEncrypted()) {
+      // Try and dispatch 'encrypted'. Won't go if ready state still
+      // HAVE_NOTHING.
+      for (uint32_t i = 0; i < crypto->mInitDatas.Length(); i++) {
+        nsCOMPtr<nsIRunnable> r = new DispatchKeyNeededEvent(
+            mParentDecoder, crypto->mInitDatas[i].mInitData,
+            crypto->mInitDatas[i].mType);
+        mAbstractMainThread->Dispatch(r.forget());
+      }
+      info.mCrypto = *crypto;
+      // We clear our crypto init data array, so the MediaFormatReader will
+      // not emit an encrypted event for the same init data again.
+      info.mCrypto.mInitDatas.Clear();
+    }
+
+    {
+      // mInfo is read off the task queue as well; guard the publication.
+      MutexAutoLock mut(mMutex);
+      mInfo = info;
+    }
+  }
+  // We now have a valid init data ; we can store it for later use.
+  mInitData = mParser->InitData();
+
+  // We have now completed the changeType operation.
+  mChangeTypeReceived = false;
+
+  // 3. Remove the initialization segment bytes from the beginning of the input
+  // buffer. This step has already been done in InitializationSegmentReceived
+  // when we transferred the content into mCurrentInputBuffer.
+  mCurrentInputBuffer->EvictAll();
+  mInputDemuxer->NotifyDataRemoved();
+  RecreateParser(true);
+
+  // 4. Set append state to WAITING_FOR_SEGMENT.
+  SetAppendState(AppendState::WAITING_FOR_SEGMENT);
+  // 5. Jump to the loop top step above.
+  ScheduleSegmentParserLoop();
+
+  if (aResult != NS_OK && mParentDecoder) {
+    // Surface the (non-fatal) warning to the media element on the main thread.
+    RefPtr<TrackBuffersManager> self = this;
+    mAbstractMainThread->Dispatch(NS_NewRunnableFunction(
+        "TrackBuffersManager::OnDemuxerInitDone", [self, aResult]() {
+          if (self->mParentDecoder && self->mParentDecoder->GetOwner()) {
+            self->mParentDecoder->GetOwner()->DecodeWarning(aResult);
+          }
+        }));
+  }
+}
+
+// Failure handler for demuxer Init(): fails the pending AppendBuffer task
+// with the demuxer's error.
+void TrackBuffersManager::OnDemuxerInitFailed(const MediaResult& aError) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  MSE_DEBUG("");
+  // Init() should never fail with WAITING_FOR_DATA; that would indicate the
+  // demuxer was fed incomplete data instead of failing outright.
+  MOZ_ASSERT(aError != NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA);
+  mDemuxerInitRequest.Complete();
+
+  RejectAppend(aError, __func__);
+}
+
+// Implements the start of the MSE coded frame processing algorithm: feeds the
+// relevant slice of mInputBuffer to the demuxer and starts demuxing (video
+// first, then audio). Returns a promise resolved with "need more data" or
+// rejected on error once CompleteCodedFrameProcessing (or a demux failure)
+// settles it.
+RefPtr<TrackBuffersManager::CodedFrameProcessingPromise>
+TrackBuffersManager::CodedFrameProcessing() {
+  MOZ_ASSERT(OnTaskQueue());
+  MOZ_ASSERT(mProcessingPromise.IsEmpty());
+  AUTO_PROFILER_LABEL("TrackBuffersManager::CodedFrameProcessing",
+                      MEDIA_PLAYBACK);
+
+  MediaByteRange mediaRange = mParser->MediaSegmentRange();
+  if (mediaRange.IsEmpty()) {
+    // The parser can't tell where the media segment ends; feed everything.
+    AppendDataToCurrentInputBuffer(*mInputBuffer);
+    mInputBuffer.reset();
+  } else {
+    MOZ_ASSERT(mProcessedInput >= mInputBuffer->Length());
+    // mProcessedInput - mInputBuffer->Length() is the stream offset of the
+    // start of mInputBuffer.
+    if (int64_t(mProcessedInput - mInputBuffer->Length()) > mediaRange.mEnd) {
+      // Something is not quite right with the data appended. Refuse it.
+      // This would typically happen if the previous media segment was partial
+      // yet a new complete media segment was added.
+      return CodedFrameProcessingPromise::CreateAndReject(NS_ERROR_FAILURE,
+                                                          __func__);
+    }
+    // The mediaRange is offset by the init segment position previously added.
+    uint32_t length =
+        mediaRange.mEnd - (mProcessedInput - mInputBuffer->Length());
+    if (!length) {
+      // We've completed our earlier media segment and no new data is to be
+      // processed. This happens with some containers that can't detect that a
+      // media segment is ending until a new one starts.
+      RefPtr<CodedFrameProcessingPromise> p =
+          mProcessingPromise.Ensure(__func__);
+      CompleteCodedFrameProcessing();
+      return p;
+    }
+    // Feed only the bytes belonging to the current media segment.
+    AppendDataToCurrentInputBuffer(mInputBuffer->To(length));
+    mInputBuffer->RemoveFront(length);
+  }
+
+  RefPtr<CodedFrameProcessingPromise> p = mProcessingPromise.Ensure(__func__);
+
+  DoDemuxVideo();
+
+  return p;
+}
+
+// Shared failure handler for both track demuxers. End-of-stream and
+// waiting-for-data are expected conditions: continue with the next stage
+// (audio demux after video, completion after audio). Anything else rejects
+// the coded frame processing promise.
+void TrackBuffersManager::OnDemuxFailed(TrackType aTrack,
+                                        const MediaResult& aError) {
+  MOZ_ASSERT(OnTaskQueue());
+  MSE_DEBUG("Failed to demux %s, failure:%s",
+            aTrack == TrackType::kVideoTrack ? "video" : "audio",
+            aError.ErrorName().get());
+  const bool recoverable =
+      aError.Code() == NS_ERROR_DOM_MEDIA_END_OF_STREAM ||
+      aError.Code() == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA;
+  if (!recoverable) {
+    RejectProcessing(aError, __func__);
+    return;
+  }
+  if (aTrack == TrackType::kVideoTrack) {
+    DoDemuxAudio();
+  } else {
+    CompleteCodedFrameProcessing();
+  }
+}
+
+// Demuxes all available video samples (GetSamples(-1)); when there is no
+// video track, skips straight to audio demuxing. Completion/failure continue
+// in OnVideoDemuxCompleted / OnVideoDemuxFailed.
+void TrackBuffersManager::DoDemuxVideo() {
+  MOZ_ASSERT(OnTaskQueue());
+  if (!HasVideo()) {
+    DoDemuxAudio();
+    return;
+  }
+  mVideoTracks.mDemuxer->GetSamples(-1)
+      ->Then(TaskQueueFromTaskQueue(), __func__, this,
+             &TrackBuffersManager::OnVideoDemuxCompleted,
+             &TrackBuffersManager::OnVideoDemuxFailed)
+      ->Track(mVideoTracks.mDemuxRequest);
+}
+
+// Dispatches a key-needed ('encrypted') event to the main thread for every
+// crypto init data found in the demuxed samples.
+void TrackBuffersManager::MaybeDispatchEncryptedEvent(
+    const nsTArray<RefPtr<MediaRawData>>& aSamples) {
+  // Try and dispatch 'encrypted'. Won't go if ready state still HAVE_NOTHING.
+  for (const RefPtr<MediaRawData>& sample : aSamples) {
+    for (const nsTArray<uint8_t>& initData : sample->mCrypto.mInitDatas) {
+      nsCOMPtr<nsIRunnable> r = new DispatchKeyNeededEvent(
+          mParentDecoder, initData, sample->mCrypto.mInitDataType);
+      mAbstractMainThread->Dispatch(r.forget());
+    }
+  }
+}
+
+// Queues the demuxed video samples, fires any pending 'encrypted' events,
+// then moves on to demuxing audio.
+void TrackBuffersManager::OnVideoDemuxCompleted(
+    RefPtr<MediaTrackDemuxer::SamplesHolder> aSamples) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  MSE_DEBUG("%zu video samples demuxed", aSamples->GetSamples().Length());
+  mVideoTracks.mDemuxRequest.Complete();
+  mVideoTracks.mQueuedSamples.AppendElements(aSamples->GetSamples());
+
+  MaybeDispatchEncryptedEvent(aSamples->GetSamples());
+  DoDemuxAudio();
+}
+
+// Demuxes all available audio samples (GetSamples(-1)); when there is no
+// audio track, goes straight to completing coded frame processing.
+// Completion/failure continue in OnAudioDemuxCompleted / OnAudioDemuxFailed.
+void TrackBuffersManager::DoDemuxAudio() {
+  MOZ_ASSERT(OnTaskQueue());
+  if (!HasAudio()) {
+    CompleteCodedFrameProcessing();
+    return;
+  }
+  mAudioTracks.mDemuxer->GetSamples(-1)
+      ->Then(TaskQueueFromTaskQueue(), __func__, this,
+             &TrackBuffersManager::OnAudioDemuxCompleted,
+             &TrackBuffersManager::OnAudioDemuxFailed)
+      ->Track(mAudioTracks.mDemuxRequest);
+}
+
+// Queues the demuxed audio samples (stripping demuxer-set trimming windows),
+// completes coded frame processing, then fires any pending 'encrypted'
+// events.
+void TrackBuffersManager::OnAudioDemuxCompleted(
+    RefPtr<MediaTrackDemuxer::SamplesHolder> aSamples) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  MSE_DEBUG("%zu audio samples demuxed", aSamples->GetSamples().Length());
+  // When using MSE, it's possible for each fragments to have their own
+  // duration, with a duration that is incorrectly rounded. Ignore the trimming
+  // information set by the demuxer to ensure a continous playback.
+  for (const auto& sample : aSamples->GetSamples()) {
+    sample->mOriginalPresentationWindow = Nothing();
+  }
+  mAudioTracks.mDemuxRequest.Complete();
+  mAudioTracks.mQueuedSamples.AppendElements(aSamples->GetSamples());
+  CompleteCodedFrameProcessing();
+
+  MaybeDispatchEncryptedEvent(aSamples->GetSamples());
+}
+
+// Final stage of the coded frame processing algorithm: runs the coded frame
+// processing steps over the queued samples for both tracks, updates buffered
+// ranges and the reported size, evicts processed bytes, and resolves
+// mProcessingPromise (true = need more data, false = loop again).
+void TrackBuffersManager::CompleteCodedFrameProcessing() {
+  MOZ_ASSERT(OnTaskQueue());
+  AUTO_PROFILER_LABEL("TrackBuffersManager::CompleteCodedFrameProcessing",
+                      MEDIA_PLAYBACK);
+
+  // 1. For each coded frame in the media segment run the following steps:
+  // Coded Frame Processing steps 1.1 to 1.21.
+
+  if (mSourceBufferAttributes->GetAppendMode() ==
+          SourceBufferAppendMode::Sequence &&
+      mVideoTracks.mQueuedSamples.Length() &&
+      mAudioTracks.mQueuedSamples.Length()) {
+    // When we are in sequence mode, the order in which we process the frames is
+    // important as it determines the future value of timestampOffset.
+    // So we process the earliest sample first. See bug 1293576.
+    TimeInterval videoInterval =
+        PresentationInterval(mVideoTracks.mQueuedSamples);
+    TimeInterval audioInterval =
+        PresentationInterval(mAudioTracks.mQueuedSamples);
+    if (audioInterval.mStart < videoInterval.mStart) {
+      ProcessFrames(mAudioTracks.mQueuedSamples, mAudioTracks);
+      ProcessFrames(mVideoTracks.mQueuedSamples, mVideoTracks);
+    } else {
+      ProcessFrames(mVideoTracks.mQueuedSamples, mVideoTracks);
+      ProcessFrames(mAudioTracks.mQueuedSamples, mAudioTracks);
+    }
+  } else {
+    ProcessFrames(mVideoTracks.mQueuedSamples, mVideoTracks);
+    ProcessFrames(mAudioTracks.mQueuedSamples, mAudioTracks);
+  }
+
+#if defined(DEBUG)
+  // Sanity-check the track buffers: each must start at a keyframe, and within
+  // a run of samples from the same stream, timecodes must not go backwards
+  // except across a keyframe boundary.
+  if (HasVideo()) {
+    const auto& track = mVideoTracks.GetTrackBuffer();
+    MOZ_ASSERT(track.IsEmpty() || track[0]->mKeyframe);
+    for (uint32_t i = 1; i < track.Length(); i++) {
+      MOZ_ASSERT(
+          (track[i - 1]->mTrackInfo->GetID() == track[i]->mTrackInfo->GetID() &&
+           track[i - 1]->mTimecode <= track[i]->mTimecode) ||
+          track[i]->mKeyframe);
+    }
+  }
+  if (HasAudio()) {
+    const auto& track = mAudioTracks.GetTrackBuffer();
+    MOZ_ASSERT(track.IsEmpty() || track[0]->mKeyframe);
+    for (uint32_t i = 1; i < track.Length(); i++) {
+      MOZ_ASSERT(
+          (track[i - 1]->mTrackInfo->GetID() == track[i]->mTrackInfo->GetID() &&
+           track[i - 1]->mTimecode <= track[i]->mTimecode) ||
+          track[i]->mKeyframe);
+    }
+  }
+#endif
+
+  mVideoTracks.mQueuedSamples.Clear();
+  mAudioTracks.mQueuedSamples.Clear();
+
+  UpdateBufferedRanges();
+
+  // Update our reported total size.
+  mSizeSourceBuffer = mVideoTracks.mSizeBuffer + mAudioTracks.mSizeBuffer;
+
+  // Return to step 6.4 of Segment Parser Loop algorithm
+  // 4. If this SourceBuffer is full and cannot accept more media data, then set
+  // the buffer full flag to true.
+  if (mSizeSourceBuffer >= EvictionThreshold()) {
+    mBufferFull = true;
+  }
+
+  // 5. If the input buffer does not contain a complete media segment, then jump
+  // to the need more data step below.
+  if (mParser->MediaSegmentRange().IsEmpty()) {
+    ResolveProcessing(true, __func__);
+    return;
+  }
+
+  mLastParsedEndTime = Some(std::max(mAudioTracks.mLastParsedEndTime,
+                                     mVideoTracks.mLastParsedEndTime));
+
+  // 6. Remove the media segment bytes from the beginning of the input buffer.
+  // Clear our demuxer from any already processed data.
+  int64_t safeToEvict =
+      std::min(HasVideo() ? mVideoTracks.mDemuxer->GetEvictionOffset(
+                                mVideoTracks.mLastParsedEndTime)
+                          : INT64_MAX,
+               HasAudio() ? mAudioTracks.mDemuxer->GetEvictionOffset(
+                                mAudioTracks.mLastParsedEndTime)
+                          : INT64_MAX);
+  mCurrentInputBuffer->EvictBefore(safeToEvict);
+
+  mInputDemuxer->NotifyDataRemoved();
+  RecreateParser(true);
+
+  // 7. Set append state to WAITING_FOR_SEGMENT.
+  SetAppendState(AppendState::WAITING_FOR_SEGMENT);
+
+  // 8. Jump to the loop top step above.
+  ResolveProcessing(false, __func__);
+}
+
+// Rejects the pending processing promise (if any) with aRejectValue.
+// NOTE(review): aName is currently unused — the promise is rejected with this
+// function's own __func__ rather than the caller-supplied name; presumably
+// intentional for logging symmetry with ResolveProcessing, but worth confirming.
+void TrackBuffersManager::RejectProcessing(const MediaResult& aRejectValue,
+                                           const char* aName) {
+  mProcessingPromise.RejectIfExists(aRejectValue, __func__);
+}
+
+// Resolves the pending processing promise (if any) with aResolveValue.
+// aResolveValue is true when more data is needed (incomplete media segment),
+// false to loop back to the top of the segment parser loop (see callers above).
+// NOTE(review): aName is unused here, same as in RejectProcessing.
+void TrackBuffersManager::ResolveProcessing(bool aResolveValue,
+                                            const char* aName) {
+  mProcessingPromise.ResolveIfExists(aResolveValue, __func__);
+}
+
+// Step 3 of the MSE coded frame processing algorithm: in "sequence" append
+// mode, when a group start timestamp is set, rebase timestampOffset so the
+// next frame lands at the group start timestamp, reset the group end
+// timestamp accordingly, and force both tracks to wait for a random access
+// point before accepting further frames.
+void TrackBuffersManager::CheckSequenceDiscontinuity(
+    const TimeUnit& aPresentationTime) {
+  if (mSourceBufferAttributes->GetAppendMode() ==
+          SourceBufferAppendMode::Sequence &&
+      mSourceBufferAttributes->HaveGroupStartTimestamp()) {
+    mSourceBufferAttributes->SetTimestampOffset(
+        mSourceBufferAttributes->GetGroupStartTimestamp() - aPresentationTime);
+    mSourceBufferAttributes->SetGroupEndTimestamp(
+        mSourceBufferAttributes->GetGroupStartTimestamp());
+    mVideoTracks.mNeedRandomAccessPoint = true;
+    mAudioTracks.mNeedRandomAccessPoint = true;
+    mSourceBufferAttributes->ResetGroupStartTimestamp();
+  }
+}
+
+// Returns the span of presentation time covered by aSamples (union of every
+// sample's [mTime, GetEndTime()] interval).
+// Precondition: aSamples must be non-empty — aSamples[0] is accessed
+// unconditionally; the caller guards this (it only compares intervals when
+// both tracks have queued samples).
+TimeInterval TrackBuffersManager::PresentationInterval(
+    const TrackBuffer& aSamples) const {
+  TimeInterval presentationInterval =
+      TimeInterval(aSamples[0]->mTime, aSamples[0]->GetEndTime());
+
+  for (uint32_t i = 1; i < aSamples.Length(); i++) {
+    auto& sample = aSamples[i];
+    presentationInterval = presentationInterval.Span(
+        TimeInterval(sample->mTime, sample->GetEndTime()));
+  }
+  return presentationInterval;
+}
+
+// Implements the MSE "coded frame processing" algorithm for one track:
+// rewrites each queued sample's timestamps in place using timestampOffset,
+// applies append-window clipping, detects decode-time discontinuities, and
+// hands contiguous runs of accepted samples to InsertFrames(). The numbered
+// comments below track the spec's algorithm steps.
+// aSamples:   queued demuxed frames for this track (mutated in place).
+// aTrackData: destination track buffer; its append state (last decode
+//             timestamp, longest frame duration, etc.) is updated here.
+void TrackBuffersManager::ProcessFrames(TrackBuffer& aSamples,
+                                        TrackData& aTrackData) {
+  AUTO_PROFILER_LABEL("TrackBuffersManager::ProcessFrames", MEDIA_PLAYBACK);
+  if (!aSamples.Length()) {
+    return;
+  }
+
+  // 1. If generate timestamps flag equals true
+  // Let presentation timestamp equal 0.
+  // Otherwise
+  // Let presentation timestamp be a double precision floating point
+  // representation of the coded frame's presentation timestamp in seconds.
+  TimeUnit presentationTimestamp = mSourceBufferAttributes->mGenerateTimestamps
+                                       ? TimeUnit::Zero()
+                                       : aSamples[0]->mTime;
+
+  // 3. If mode equals "sequence" and group start timestamp is set, then run the
+  // following steps:
+  CheckSequenceDiscontinuity(presentationTimestamp);
+
+  // 5. Let track buffer equal the track buffer that the coded frame will be
+  // added to.
+  auto& trackBuffer = aTrackData;
+
+  TimeIntervals samplesRange;
+  uint32_t sizeNewSamples = 0;
+  TrackBuffer samples;  // array that will contain the frames to be added
+                        // to our track buffer.
+
+  // We assume that no frames are contiguous within a media segment and as such
+  // don't need to check for discontinuity except for the first frame and should
+  // a frame be ignored due to the target window.
+  bool needDiscontinuityCheck = true;
+
+  // Highest presentation time seen in samples block.
+  TimeUnit highestSampleTime;
+
+  // aSamples is known non-empty here (early return above); reset the parsed
+  // end time before scanning the new batch.
+  if (aSamples.Length()) {
+    aTrackData.mLastParsedEndTime = TimeUnit();
+  }
+
+  // Stamps aSample with the adjusted interval and current track info, then
+  // accumulates it into the pending `samples` batch and bookkeeping totals.
+  auto addToSamples = [&](MediaRawData* aSample,
+                          const TimeInterval& aInterval) {
+    aSample->mTime = aInterval.mStart;
+    aSample->mDuration = aInterval.Length();
+    aSample->mTrackInfo = trackBuffer.mLastInfo;
+    MOZ_DIAGNOSTIC_ASSERT(aSample->HasValidTime());
+    samplesRange += aInterval;
+    sizeNewSamples += aSample->ComputedSizeOfIncludingThis();
+    samples.AppendElement(aSample);
+  };
+
+  // Will be set to the last frame dropped due to being outside mAppendWindow.
+  // It will be added prior the first following frame which can be added to the
+  // track buffer.
+  // This sample will be set with a duration of only 1us which will cause it to
+  // be dropped once returned by the decoder.
+  // This sample is required to "prime" the decoder so that the following frame
+  // can be fully decoded.
+  RefPtr<MediaRawData> previouslyDroppedSample;
+  for (auto& sample : aSamples) {
+    const TimeUnit sampleEndTime = sample->GetEndTime();
+    if (sampleEndTime > aTrackData.mLastParsedEndTime) {
+      aTrackData.mLastParsedEndTime = sampleEndTime;
+    }
+
+    // We perform step 10 right away as we can't do anything should a keyframe
+    // be needed until we have one.
+
+    // 10. If the need random access point flag on track buffer equals true,
+    // then run the following steps:
+    if (trackBuffer.mNeedRandomAccessPoint) {
+      // 1. If the coded frame is not a random access point, then drop the coded
+      // frame and jump to the top of the loop to start processing the next
+      // coded frame.
+      if (!sample->mKeyframe) {
+        previouslyDroppedSample = nullptr;
+        continue;
+      }
+      // 2. Set the need random access point flag on track buffer to false.
+      trackBuffer.mNeedRandomAccessPoint = false;
+    }
+
+    // We perform step 1,2 and 4 at once:
+    // 1. If generate timestamps flag equals true:
+    // Let presentation timestamp equal 0.
+    // Let decode timestamp equal 0.
+    // Otherwise:
+    // Let presentation timestamp be a double precision floating point
+    // representation of the coded frame's presentation timestamp in seconds.
+    // Let decode timestamp be a double precision floating point
+    // representation of the coded frame's decode timestamp in seconds.
+
+    // 2. Let frame duration be a double precision floating point representation
+    // of the coded frame's duration in seconds. Step 3 is performed earlier or
+    // when a discontinuity has been detected.
+    // 4. If timestampOffset is not 0, then run the following steps:
+
+    TimeUnit sampleTime = sample->mTime;
+    TimeUnit sampleTimecode = sample->mTimecode;
+    TimeUnit sampleDuration = sample->mDuration;
+    // Keep the timestamp, set by js, in the time base of the container.
+    TimeUnit timestampOffset =
+        mSourceBufferAttributes->GetTimestampOffset().ToBase(sample->mTime);
+
+    TimeInterval sampleInterval =
+        mSourceBufferAttributes->mGenerateTimestamps
+            ? TimeInterval(timestampOffset, timestampOffset + sampleDuration)
+            : TimeInterval(timestampOffset + sampleTime,
+                           timestampOffset + sampleTime + sampleDuration);
+    TimeUnit decodeTimestamp = mSourceBufferAttributes->mGenerateTimestamps
+                                   ? timestampOffset
+                                   : timestampOffset + sampleTimecode;
+
+    SAMPLE_DEBUG(
+        "Processing %s frame [%" PRId64 ",%" PRId64 "] (adjusted:[%" PRId64
+        ",%" PRId64 "]), dts:%" PRId64 ", duration:%" PRId64 ", kf:%d)",
+        aTrackData.mInfo->mMimeType.get(), sample->mTime.ToMicroseconds(),
+        sample->GetEndTime().ToMicroseconds(),
+        sampleInterval.mStart.ToMicroseconds(),
+        sampleInterval.mEnd.ToMicroseconds(),
+        sample->mTimecode.ToMicroseconds(), sample->mDuration.ToMicroseconds(),
+        sample->mKeyframe);
+
+    // 6. If last decode timestamp for track buffer is set and decode timestamp
+    // is less than last decode timestamp: OR If last decode timestamp for track
+    // buffer is set and the difference between decode timestamp and last decode
+    // timestamp is greater than 2 times last frame duration:
+    if (needDiscontinuityCheck && trackBuffer.mLastDecodeTimestamp.isSome() &&
+        (decodeTimestamp < trackBuffer.mLastDecodeTimestamp.ref() ||
+         (decodeTimestamp - trackBuffer.mLastDecodeTimestamp.ref() >
+          trackBuffer.mLongestFrameDuration * 2))) {
+      MSE_DEBUG("Discontinuity detected.");
+      SourceBufferAppendMode appendMode =
+          mSourceBufferAttributes->GetAppendMode();
+
+      // 1a. If mode equals "segments":
+      if (appendMode == SourceBufferAppendMode::Segments) {
+        // Set group end timestamp to presentation timestamp.
+        mSourceBufferAttributes->SetGroupEndTimestamp(sampleInterval.mStart);
+      }
+      // 1b. If mode equals "sequence":
+      if (appendMode == SourceBufferAppendMode::Sequence) {
+        // Set group start timestamp equal to the group end timestamp.
+        mSourceBufferAttributes->SetGroupStartTimestamp(
+            mSourceBufferAttributes->GetGroupEndTimestamp());
+      }
+      for (auto& track : GetTracksList()) {
+        // 2. Unset the last decode timestamp on all track buffers.
+        // 3. Unset the last frame duration on all track buffers.
+        // 4. Unset the highest end timestamp on all track buffers.
+        // 5. Set the need random access point flag on all track buffers to
+        // true.
+        track->ResetAppendState();
+      }
+      // 6. Jump to the Loop Top step above to restart processing of the current
+      // coded frame. Rather than restarting the process for the frame, we run
+      // the first steps again instead.
+      // 3. If mode equals "sequence" and group start timestamp is set, then run
+      // the following steps:
+      TimeUnit presentationTimestamp =
+          mSourceBufferAttributes->mGenerateTimestamps ? TimeUnit()
+                                                       : sampleTime;
+      CheckSequenceDiscontinuity(presentationTimestamp);
+
+      if (!sample->mKeyframe) {
+        previouslyDroppedSample = nullptr;
+        continue;
+      }
+      if (appendMode == SourceBufferAppendMode::Sequence) {
+        // mSourceBufferAttributes->GetTimestampOffset() was modified during
+        // CheckSequenceDiscontinuity. We need to update our variables.
+        timestampOffset = mSourceBufferAttributes->GetTimestampOffset();
+        sampleInterval =
+            mSourceBufferAttributes->mGenerateTimestamps
+                ? TimeInterval(timestampOffset,
+                               timestampOffset + sampleDuration)
+                : TimeInterval(timestampOffset + sampleTime,
+                               timestampOffset + sampleTime + sampleDuration);
+        decodeTimestamp = mSourceBufferAttributes->mGenerateTimestamps
+                              ? timestampOffset
+                              : timestampOffset + sampleTimecode;
+      }
+      trackBuffer.mNeedRandomAccessPoint = false;
+      needDiscontinuityCheck = false;
+    }
+
+    // 7. Let frame end timestamp equal the sum of presentation timestamp and
+    // frame duration. This is sampleInterval.mEnd
+
+    // 8. If presentation timestamp is less than appendWindowStart, then set the
+    // need random access point flag to true, drop the coded frame, and jump to
+    // the top of the loop to start processing the next coded frame.
+    // 9. If frame end timestamp is greater than appendWindowEnd, then set the
+    // need random access point flag to true, drop the coded frame, and jump to
+    // the top of the loop to start processing the next coded frame.
+    if (!mAppendWindow.ContainsStrict(sampleInterval)) {
+      if (mAppendWindow.IntersectsStrict(sampleInterval)) {
+        // 8. Note: Some implementations MAY choose to collect some of these
+        // coded frames with presentation timestamp less than
+        // appendWindowStart and use them to generate a splice at the first
+        // coded frame that has a presentation timestamp greater than or
+        // equal to appendWindowStart even if that frame is not a random
+        // access point. Supporting this requires multiple decoders or faster
+        // than real-time decoding so for now this behavior will not be a
+        // normative requirement.
+        // 9. Note: Some implementations MAY choose to collect coded frames with
+        // presentation timestamp less than appendWindowEnd and frame end
+        // timestamp greater than appendWindowEnd and use them to generate a
+        // splice across the portion of the collected coded frames within the
+        // append window at time of collection, and the beginning portion of
+        // later processed frames which only partially overlap the end of the
+        // collected coded frames. Supporting this requires multiple decoders
+        // or faster than real-time decoding so for now this behavior will
+        // not be a normative requirement. In conjunction with collecting
+        // coded frames that span appendWindowStart, implementations MAY thus
+        // support gapless audio splicing.
+        TimeInterval intersection = mAppendWindow.Intersection(sampleInterval);
+        sample->mOriginalPresentationWindow = Some(sampleInterval);
+        MSE_DEBUGV("will truncate frame from [%" PRId64 ",%" PRId64
+                   "] to [%" PRId64 ",%" PRId64 "]",
+                   sampleInterval.mStart.ToMicroseconds(),
+                   sampleInterval.mEnd.ToMicroseconds(),
+                   intersection.mStart.ToMicroseconds(),
+                   intersection.mEnd.ToMicroseconds());
+        sampleInterval = intersection;
+      } else {
+        sample->mOriginalPresentationWindow = Some(sampleInterval);
+        sample->mTimecode = decodeTimestamp;
+        previouslyDroppedSample = sample;
+        MSE_DEBUGV("frame [%" PRId64 ",%" PRId64
+                   "] outside appendWindow [%" PRId64 ",%" PRId64 "] dropping",
+                   sampleInterval.mStart.ToMicroseconds(),
+                   sampleInterval.mEnd.ToMicroseconds(),
+                   mAppendWindow.mStart.ToMicroseconds(),
+                   mAppendWindow.mEnd.ToMicroseconds());
+        if (samples.Length()) {
+          // We are creating a discontinuity in the samples.
+          // Insert the samples processed so far.
+          InsertFrames(samples, samplesRange, trackBuffer);
+          samples.Clear();
+          samplesRange = TimeIntervals();
+          trackBuffer.mSizeBuffer += sizeNewSamples;
+          sizeNewSamples = 0;
+          UpdateHighestTimestamp(trackBuffer, highestSampleTime);
+        }
+        trackBuffer.mNeedRandomAccessPoint = true;
+        needDiscontinuityCheck = true;
+        continue;
+      }
+    }
+    if (previouslyDroppedSample) {
+      MSE_DEBUGV("Adding silent frame");
+      // This "silent" sample will be added so that it starts exactly before the
+      // first usable one. The duration of the actual sample will be adjusted so
+      // that the total duration stays the same.
+      // Setting a dummy presentation window of 1us will cause this sample to be
+      // dropped after decoding by the AudioTrimmer (if audio).
+      TimeInterval previouslyDroppedSampleInterval =
+          TimeInterval(sampleInterval.mStart,
+                       sampleInterval.mStart + TimeUnit::FromMicroseconds(1));
+      addToSamples(previouslyDroppedSample, previouslyDroppedSampleInterval);
+      previouslyDroppedSample = nullptr;
+      sampleInterval.mStart += previouslyDroppedSampleInterval.Length();
+    }
+
+    sample->mTimecode = decodeTimestamp;
+    addToSamples(sample, sampleInterval);
+
+    // Steps 11,12,13,14, 15 and 16 will be done in one block in InsertFrames.
+
+    // A keyframe resets the longest-duration tracking; otherwise keep the max.
+    trackBuffer.mLongestFrameDuration =
+        trackBuffer.mLastFrameDuration.isSome()
+            ? sample->mKeyframe
+                  ? sampleDuration
+                  : std::max(sampleDuration, trackBuffer.mLongestFrameDuration)
+            : sampleDuration;
+
+    // 17. Set last decode timestamp for track buffer to decode timestamp.
+    trackBuffer.mLastDecodeTimestamp = Some(decodeTimestamp);
+    // 18. Set last frame duration for track buffer to frame duration.
+    trackBuffer.mLastFrameDuration = Some(sampleDuration);
+
+    // 19. If highest end timestamp for track buffer is unset or frame end
+    // timestamp is greater than highest end timestamp, then set highest end
+    // timestamp for track buffer to frame end timestamp.
+    if (trackBuffer.mHighestEndTimestamp.isNothing() ||
+        sampleInterval.mEnd > trackBuffer.mHighestEndTimestamp.ref()) {
+      trackBuffer.mHighestEndTimestamp = Some(sampleInterval.mEnd);
+    }
+    if (sampleInterval.mStart > highestSampleTime) {
+      highestSampleTime = sampleInterval.mStart;
+    }
+    // 20. If frame end timestamp is greater than group end timestamp, then set
+    // group end timestamp equal to frame end timestamp.
+    if (sampleInterval.mEnd > mSourceBufferAttributes->GetGroupEndTimestamp()) {
+      mSourceBufferAttributes->SetGroupEndTimestamp(sampleInterval.mEnd);
+    }
+    // 21. If generate timestamps flag equals true, then set timestampOffset
+    // equal to frame end timestamp.
+    if (mSourceBufferAttributes->mGenerateTimestamps) {
+      mSourceBufferAttributes->SetTimestampOffset(sampleInterval.mEnd);
+    }
+  }
+
+  // Flush any remaining accepted samples into the track buffer.
+  if (samples.Length()) {
+    InsertFrames(samples, samplesRange, trackBuffer);
+    trackBuffer.mSizeBuffer += sizeNewSamples;
+    UpdateHighestTimestamp(trackBuffer, highestSampleTime);
+  }
+}
+
+// Ensures aTrackData.mNextInsertionIndex is set: the position in the track
+// buffer where frames starting at aSampleTime should be inserted. Returns
+// true on success; false only if an insertion point could not be located
+// (callers treat this as a processing failure).
+bool TrackBuffersManager::CheckNextInsertionIndex(TrackData& aTrackData,
+                                                  const TimeUnit& aSampleTime) {
+  if (aTrackData.mNextInsertionIndex.isSome()) {
+    return true;
+  }
+
+  const TrackBuffer& data = aTrackData.GetTrackBuffer();
+
+  if (data.IsEmpty() || aSampleTime < aTrackData.mBufferedRanges.GetStart()) {
+    aTrackData.mNextInsertionIndex = Some(0u);
+    return true;
+  }
+
+  // Find which discontinuity we should insert the frame before.
+  TimeInterval target;
+  for (const auto& interval : aTrackData.mBufferedRanges) {
+    if (aSampleTime < interval.mStart) {
+      target = interval;
+      break;
+    }
+  }
+  if (target.IsEmpty()) {
+    // No target found, it will be added at the end of the track buffer.
+    aTrackData.mNextInsertionIndex = Some(uint32_t(data.Length()));
+    return true;
+  }
+  // We now need to find the first frame of the searched interval.
+  // We will insert our new frames right before.
+  // NOTE(review): the second clause (GetEndTime() > target.mStart) also
+  // matches a frame that merely overlaps the target's start — presumably to
+  // cope with frames whose duration spills into the next range; confirm.
+  for (uint32_t i = 0; i < data.Length(); i++) {
+    const RefPtr<MediaRawData>& sample = data[i];
+    if (sample->mTime >= target.mStart ||
+        sample->GetEndTime() > target.mStart) {
+      aTrackData.mNextInsertionIndex = Some(i);
+      return true;
+    }
+  }
+  NS_ASSERTION(false, "Insertion Index Not Found");
+  return false;
+}
+
+// Steps 11-16 of the coded frame processing algorithm: removes existing
+// frames that the new batch overwrites (RemoveFrames), then splices aSamples
+// into the track buffer at mNextInsertionIndex and extends the buffered
+// ranges by aIntervals. Also keeps the demuxing index (mNextGetSampleIndex)
+// and eviction index consistent with the insertion.
+void TrackBuffersManager::InsertFrames(TrackBuffer& aSamples,
+                                       const TimeIntervals& aIntervals,
+                                       TrackData& aTrackData) {
+  AUTO_PROFILER_LABEL("TrackBuffersManager::InsertFrames", MEDIA_PLAYBACK);
+  // 5. Let track buffer equal the track buffer that the coded frame will be
+  // added to.
+  auto& trackBuffer = aTrackData;
+
+  MSE_DEBUGV("Processing %zu %s frames(start:%" PRId64 " end:%" PRId64 ")",
+             aSamples.Length(), aTrackData.mInfo->mMimeType.get(),
+             aIntervals.GetStart().ToMicroseconds(),
+             aIntervals.GetEnd().ToMicroseconds());
+  if (profiler_thread_is_being_profiled_for_markers()) {
+    nsPrintfCString markerString(
+        "Processing %zu %s frames(start:%" PRId64 " end:%" PRId64 ")",
+        aSamples.Length(), aTrackData.mInfo->mMimeType.get(),
+        aIntervals.GetStart().ToMicroseconds(),
+        aIntervals.GetEnd().ToMicroseconds());
+    PROFILER_MARKER_TEXT("InsertFrames", MEDIA_PLAYBACK, {}, markerString);
+  }
+
+  // 11. Let spliced audio frame be an unset variable for holding audio splice
+  // information
+  // 12. Let spliced timed text frame be an unset variable for holding timed
+  // text splice information
+
+  // 13. If last decode timestamp for track buffer is unset and presentation
+  // timestamp falls within the presentation interval of a coded frame in track
+  // buffer,then run the following steps: For now we only handle replacing
+  // existing frames with the new ones. So we skip this step.
+
+  // 14. Remove existing coded frames in track buffer:
+  // a) If highest end timestamp for track buffer is not set:
+  //    Remove all coded frames from track buffer that have a presentation
+  //    timestamp greater than or equal to presentation timestamp and less
+  //    than frame end timestamp.
+  // b) If highest end timestamp for track buffer is set and less than or
+  //    equal to presentation timestamp:
+  //    Remove all coded frames from track buffer that have a presentation
+  //    timestamp greater than or equal to highest end timestamp and less than
+  //    frame end timestamp
+
+  // There is an ambiguity on how to remove frames, which was lodged with:
+  // https://www.w3.org/Bugs/Public/show_bug.cgi?id=28710, implementing as per
+  // bug description.
+
+  // 15. Remove decoding dependencies of the coded frames removed in the
+  // previous step: Remove all coded frames between the coded frames removed in
+  // the previous step and the next random access point after those removed
+  // frames.
+
+  if (trackBuffer.mBufferedRanges.IntersectsStrict(aIntervals)) {
+    if (aSamples[0]->mKeyframe &&
+        (mType.Type() == MEDIAMIMETYPE("video/webm") ||
+         mType.Type() == MEDIAMIMETYPE("audio/webm"))) {
+      // We are starting a new GOP, we do not have to worry about breaking an
+      // existing current coded frame group. Reset the next insertion index
+      // so the search for when to start our frames removal can be exhaustive.
+      // This is a workaround for bug 1276184 and only until either bug 1277733
+      // or bug 1209386 is fixed.
+      // With the webm container, we can't always properly determine the
+      // duration of the last frame, which may cause the last frame of a cluster
+      // to overlap the following frame.
+      trackBuffer.mNextInsertionIndex.reset();
+    }
+    uint32_t index = RemoveFrames(aIntervals, trackBuffer,
+                                  trackBuffer.mNextInsertionIndex.refOr(0),
+                                  RemovalMode::kTruncateFrame);
+    if (index) {
+      trackBuffer.mNextInsertionIndex = Some(index);
+    }
+  }
+
+  // 16. Add the coded frame with the presentation timestamp, decode timestamp,
+  // and frame duration to the track buffer.
+  if (!CheckNextInsertionIndex(aTrackData, aSamples[0]->mTime)) {
+    RejectProcessing(NS_ERROR_FAILURE, __func__);
+    return;
+  }
+
+  // Adjust our demuxing index if necessary.
+  if (trackBuffer.mNextGetSampleIndex.isSome()) {
+    if (trackBuffer.mNextInsertionIndex.ref() ==
+            trackBuffer.mNextGetSampleIndex.ref() &&
+        aIntervals.GetEnd() >= trackBuffer.mNextSampleTime) {
+      MSE_DEBUG("Next sample to be played got overwritten");
+      trackBuffer.mNextGetSampleIndex.reset();
+      ResetEvictionIndex(trackBuffer);
+    } else if (trackBuffer.mNextInsertionIndex.ref() <=
+               trackBuffer.mNextGetSampleIndex.ref()) {
+      trackBuffer.mNextGetSampleIndex.ref() += aSamples.Length();
+      // We could adjust the eviction index so that the new data gets added to
+      // the evictable amount (as it is prior currentTime). However, considering
+      // new data is being added prior the current playback, it's likely that
+      // this data will be played next, and as such we probably don't want to
+      // have it evicted too early. So instead reset the eviction index instead.
+      ResetEvictionIndex(trackBuffer);
+    }
+  }
+
+  TrackBuffer& data = trackBuffer.GetTrackBuffer();
+  data.InsertElementsAt(trackBuffer.mNextInsertionIndex.ref(), aSamples);
+  trackBuffer.mNextInsertionIndex.ref() += aSamples.Length();
+
+  // Update our buffered range with new sample interval.
+  trackBuffer.mBufferedRanges += aIntervals;
+  // We allow a fuzz factor in our interval of half a frame length,
+  // as fuzz is +/- value, giving an effective leeway of a full frame
+  // length.
+  if (!aIntervals.IsEmpty()) {
+    TimeIntervals range(aIntervals);
+    range.SetFuzz(trackBuffer.mLongestFrameDuration / 2);
+    trackBuffer.mSanitizedBufferedRanges += range;
+  }
+}
+
+// Raises aTrackData.mHighestStartTimestamp to aHighestTime if it is larger.
+// The field is read off the task queue (see HighestStartTime()), hence the
+// mutex around the write.
+void TrackBuffersManager::UpdateHighestTimestamp(
+    TrackData& aTrackData, const media::TimeUnit& aHighestTime) {
+  if (aHighestTime > aTrackData.mHighestStartTimestamp) {
+    MutexAutoLock mut(mMutex);
+    aTrackData.mHighestStartTimestamp = aHighestTime;
+  }
+}
+
+// Removes (or, in kTruncateFrame mode, truncates) the existing frames of
+// aTrackData that aIntervals overwrites, starting the scan at aStartIndex,
+// then also drops the decode-dependent frames up to the next keyframe.
+// Updates buffered ranges, buffer size, and the demux/insertion/eviction
+// indices to account for the removal. Returns the index of the first removed
+// frame, or 0 if nothing was removed.
+uint32_t TrackBuffersManager::RemoveFrames(const TimeIntervals& aIntervals,
+                                           TrackData& aTrackData,
+                                           uint32_t aStartIndex,
+                                           RemovalMode aMode) {
+  AUTO_PROFILER_LABEL("TrackBuffersManager::RemoveFrames", MEDIA_PLAYBACK);
+  TrackBuffer& data = aTrackData.GetTrackBuffer();
+  Maybe<uint32_t> firstRemovedIndex;
+  uint32_t lastRemovedIndex = 0;
+
+  // We loop from aStartIndex to avoid removing frames that we inserted earlier
+  // and part of the current coded frame group. This allows us to handle step
+  // 14 of the coded frame processing algorithm without having to check the
+  // value of highest end timestamp: "Remove existing coded frames in track
+  // buffer:
+  // If highest end timestamp for track buffer is not set:
+  //  Remove all coded frames from track buffer that have a presentation
+  //  timestamp greater than or equal to presentation timestamp and less than
+  //  frame end timestamp.
+  // If highest end timestamp for track buffer is set and less than or equal to
+  // presentation timestamp:
+  //  Remove all coded frames from track buffer that have a presentation
+  //  timestamp greater than or equal to highest end timestamp and less than
+  //  frame end timestamp"
+  TimeUnit intervalsEnd = aIntervals.GetEnd();
+  for (uint32_t i = aStartIndex; i < data.Length(); i++) {
+    RefPtr<MediaRawData>& sample = data[i];
+    if (aIntervals.ContainsStrict(sample->mTime)) {
+      // The start of this existing frame will be overwritten, we drop that
+      // entire frame.
+      MSE_DEBUGV("overridding start of frame [%" PRId64 ",%" PRId64
+                 "] with [%" PRId64 ",%" PRId64 "] dropping",
+                 sample->mTime.ToMicroseconds(),
+                 sample->GetEndTime().ToMicroseconds(),
+                 aIntervals.GetStart().ToMicroseconds(),
+                 aIntervals.GetEnd().ToMicroseconds());
+      if (firstRemovedIndex.isNothing()) {
+        firstRemovedIndex = Some(i);
+      }
+      lastRemovedIndex = i;
+      continue;
+    }
+    TimeInterval sampleInterval(sample->mTime, sample->GetEndTime());
+    if (aMode == RemovalMode::kTruncateFrame &&
+        aIntervals.IntersectsStrict(sampleInterval)) {
+      // The sample to be overwritten is only partially covered.
+      TimeIntervals intersection =
+          Intersection(aIntervals, TimeIntervals(sampleInterval));
+      bool found = false;
+      TimeUnit startTime = intersection.GetStart(&found);
+      MOZ_DIAGNOSTIC_ASSERT(found, "Must intersect with added coded frames");
+      Unused << found;
+      // Signal that this frame should be truncated when decoded.
+      if (!sample->mOriginalPresentationWindow) {
+        sample->mOriginalPresentationWindow = Some(sampleInterval);
+      }
+      MOZ_ASSERT(startTime > sample->mTime);
+      sample->mDuration = startTime - sample->mTime;
+      MOZ_DIAGNOSTIC_ASSERT(sample->mDuration.IsValid());
+      MSE_DEBUGV("partial overwrite of frame [%" PRId64 ",%" PRId64
+                 "] with [%" PRId64 ",%" PRId64
+                 "] trim to "
+                 "[%" PRId64 ",%" PRId64 "]",
+                 sampleInterval.mStart.ToMicroseconds(),
+                 sampleInterval.mEnd.ToMicroseconds(),
+                 aIntervals.GetStart().ToMicroseconds(),
+                 aIntervals.GetEnd().ToMicroseconds(),
+                 sample->mTime.ToMicroseconds(),
+                 sample->GetEndTime().ToMicroseconds());
+      continue;
+    }
+
+    if (sample->mTime >= intervalsEnd) {
+      // We can break the loop now. All frames up to the next keyframe will be
+      // removed during the next step.
+      break;
+    }
+  }
+
+  if (firstRemovedIndex.isNothing()) {
+    return 0;
+  }
+
+  // Remove decoding dependencies of the coded frames removed in the previous
+  // step: Remove all coded frames between the coded frames removed in the
+  // previous step and the next random access point after those removed frames.
+  for (uint32_t i = lastRemovedIndex + 1; i < data.Length(); i++) {
+    const RefPtr<MediaRawData>& sample = data[i];
+    if (sample->mKeyframe) {
+      break;
+    }
+    lastRemovedIndex = i;
+  }
+
+  // Tally what is being removed before mutating `data`.
+  TimeUnit maxSampleDuration;
+  uint32_t sizeRemoved = 0;
+  TimeIntervals removedIntervals;
+  for (uint32_t i = firstRemovedIndex.ref(); i <= lastRemovedIndex; i++) {
+    const RefPtr<MediaRawData> sample = data[i];
+    TimeInterval sampleInterval =
+        TimeInterval(sample->mTime, sample->GetEndTime());
+    removedIntervals += sampleInterval;
+    if (sample->mDuration > maxSampleDuration) {
+      maxSampleDuration = sample->mDuration;
+    }
+    sizeRemoved += sample->ComputedSizeOfIncludingThis();
+  }
+  aTrackData.mSizeBuffer -= sizeRemoved;
+
+  MSE_DEBUG("Removing frames from:%u (frames:%u) ([%f, %f))",
+            firstRemovedIndex.ref(),
+            lastRemovedIndex - firstRemovedIndex.ref() + 1,
+            removedIntervals.GetStart().ToSeconds(),
+            removedIntervals.GetEnd().ToSeconds());
+  if (profiler_thread_is_being_profiled_for_markers()) {
+    nsPrintfCString markerString(
+        "Removing frames from:%u (frames:%u) ([%f, %f))",
+        firstRemovedIndex.ref(), lastRemovedIndex - firstRemovedIndex.ref() + 1,
+        removedIntervals.GetStart().ToSeconds(),
+        removedIntervals.GetEnd().ToSeconds());
+    PROFILER_MARKER_TEXT("RemoveFrames", MEDIA_PLAYBACK, {}, markerString);
+  }
+
+  // Keep the demuxing position consistent with the indices shifting left.
+  if (aTrackData.mNextGetSampleIndex.isSome()) {
+    if (aTrackData.mNextGetSampleIndex.ref() >= firstRemovedIndex.ref() &&
+        aTrackData.mNextGetSampleIndex.ref() <= lastRemovedIndex) {
+      MSE_DEBUG("Next sample to be played got evicted");
+      aTrackData.mNextGetSampleIndex.reset();
+      ResetEvictionIndex(aTrackData);
+    } else if (aTrackData.mNextGetSampleIndex.ref() > lastRemovedIndex) {
+      uint32_t samplesRemoved = lastRemovedIndex - firstRemovedIndex.ref() + 1;
+      aTrackData.mNextGetSampleIndex.ref() -= samplesRemoved;
+      if (aTrackData.mEvictionIndex.mLastIndex > lastRemovedIndex) {
+        MOZ_DIAGNOSTIC_ASSERT(
+            aTrackData.mEvictionIndex.mLastIndex >= samplesRemoved &&
+                aTrackData.mEvictionIndex.mEvictable >= sizeRemoved,
+            "Invalid eviction index");
+        MutexAutoLock mut(mMutex);
+        aTrackData.mEvictionIndex.mLastIndex -= samplesRemoved;
+        aTrackData.mEvictionIndex.mEvictable -= sizeRemoved;
+      } else {
+        ResetEvictionIndex(aTrackData);
+      }
+    }
+  }
+
+  if (aTrackData.mNextInsertionIndex.isSome()) {
+    if (aTrackData.mNextInsertionIndex.ref() > firstRemovedIndex.ref() &&
+        aTrackData.mNextInsertionIndex.ref() <= lastRemovedIndex + 1) {
+      aTrackData.ResetAppendState();
+      MSE_DEBUG("NextInsertionIndex got reset.");
+    } else if (aTrackData.mNextInsertionIndex.ref() > lastRemovedIndex + 1) {
+      aTrackData.mNextInsertionIndex.ref() -=
+          lastRemovedIndex - firstRemovedIndex.ref() + 1;
+    }
+  }
+
+  // Update our buffered range to exclude the range just removed.
+  aTrackData.mBufferedRanges -= removedIntervals;
+
+  // Recalculate sanitized buffered ranges.
+  aTrackData.mSanitizedBufferedRanges = aTrackData.mBufferedRanges;
+  aTrackData.mSanitizedBufferedRanges.SetFuzz(maxSampleDuration / 2);
+
+  data.RemoveElementsAt(firstRemovedIndex.ref(),
+                        lastRemovedIndex - firstRemovedIndex.ref() + 1);
+
+  if (removedIntervals.GetEnd() >= aTrackData.mHighestStartTimestamp &&
+      removedIntervals.GetStart() <= aTrackData.mHighestStartTimestamp) {
+    // The sample with the highest presentation time got removed.
+    // Rescan the trackbuffer to determine the new one.
+    TimeUnit highestStartTime;
+    for (const auto& sample : data) {
+      if (sample->mTime > highestStartTime) {
+        highestStartTime = sample->mTime;
+      }
+    }
+    MutexAutoLock mut(mMutex);
+    aTrackData.mHighestStartTimestamp = highestStartTime;
+  }
+
+  return firstRemovedIndex.ref();
+}
+
+// Replaces mParser with a fresh ContainerParser for mType. When
+// aReuseInitData is true and we hold init data, re-feeds it so the new parser
+// is primed, and counts it as processed input; otherwise resets
+// mProcessedInput to 0.
+void TrackBuffersManager::RecreateParser(bool aReuseInitData) {
+  MOZ_ASSERT(OnTaskQueue());
+  // Recreate our parser for only the data remaining. This is required
+  // as it has parsed the entire InputBuffer provided.
+  // Once the old TrackBuffer/MediaSource implementation is removed
+  // we can optimize this part. TODO
+  if (mParser) {
+    DDUNLINKCHILD(mParser.get());
+  }
+  mParser = ContainerParser::CreateForMIMEType(mType);
+  DDLINKCHILD("parser", mParser.get());
+  if (aReuseInitData && mInitData) {
+    TimeUnit start, end;
+    mParser->ParseStartAndEndTimestamps(MediaSpan(mInitData), start, end);
+    mProcessedInput = mInitData->Length();
+  } else {
+    mProcessedInput = 0;
+  }
+}
+
+// Returns pointers to the TrackData of each present track, video first.
+nsTArray<TrackBuffersManager::TrackData*> TrackBuffersManager::GetTracksList() {
+  nsTArray<TrackData*> tracks;
+  if (HasVideo()) {
+    tracks.AppendElement(&mVideoTracks);
+  }
+  if (HasAudio()) {
+    tracks.AppendElement(&mAudioTracks);
+  }
+  return tracks;
+}
+
+// Const overload of GetTracksList(): same ordering (video first).
+nsTArray<const TrackBuffersManager::TrackData*>
+TrackBuffersManager::GetTracksList() const {
+  nsTArray<const TrackData*> tracks;
+  if (HasVideo()) {
+    tracks.AppendElement(&mVideoTracks);
+  }
+  if (HasAudio()) {
+    tracks.AppendElement(&mAudioTracks);
+  }
+  return tracks;
+}
+
+// Logs and records the segment-parser-loop append state transition on the
+// source buffer attributes.
+void TrackBuffersManager::SetAppendState(AppendState aAppendState) {
+  MSE_DEBUG("AppendState changed from %s to %s",
+            AppendStateToStr(mSourceBufferAttributes->GetAppendState()),
+            AppendStateToStr(aAppendState));
+  mSourceBufferAttributes->SetAppendState(aAppendState);
+}
+
+// Returns a mutex-protected copy of the current media info; safe to call from
+// any thread.
+MediaInfo TrackBuffersManager::GetMetadata() const {
+  MutexAutoLock mut(mMutex);
+  return mInfo;
+}
+
+// Task-queue-only accessor for a track's buffered ranges (no locking; see
+// SafeBuffered() for the thread-safe mirrored copy).
+const TimeIntervals& TrackBuffersManager::Buffered(
+    TrackInfo::TrackType aTrack) const {
+  MOZ_ASSERT(OnTaskQueue());
+  return GetTracksData(aTrack).mBufferedRanges;
+}
+
+// Task-queue-only accessor for the highest frame start time seen on aTrack.
+const media::TimeUnit& TrackBuffersManager::HighestStartTime(
+    TrackInfo::TrackType aTrack) const {
+  MOZ_ASSERT(OnTaskQueue());
+  return GetTracksData(aTrack).mHighestStartTimestamp;
+}
+
+// Thread-safe accessor for the mirrored buffered ranges; usable off the task
+// queue, unlike Buffered(). Returns a copy under the mutex.
+TimeIntervals TrackBuffersManager::SafeBuffered(
+    TrackInfo::TrackType aTrack) const {
+  MutexAutoLock mut(mMutex);
+  return aTrack == TrackInfo::kVideoTrack ? mVideoBufferedRanges
+                                          : mAudioBufferedRanges;
+}
+
+// Thread-safe: returns the maximum highest-start-timestamp across all present
+// tracks (zero TimeUnit if no tracks).
+TimeUnit TrackBuffersManager::HighestStartTime() const {
+  MutexAutoLock mut(mMutex);
+  TimeUnit highestStartTime;
+  for (auto& track : GetTracksList()) {
+    highestStartTime =
+        std::max(track->mHighestStartTimestamp, highestStartTime);
+  }
+  return highestStartTime;
+}
+
+// Maximum buffered end time across the existing tracks, computed from the
+// mutex-mirrored buffered ranges. Takes mMutex, then delegates to the
+// locked overload below.
+TimeUnit TrackBuffersManager::HighestEndTime() const {
+  MutexAutoLock mut(mMutex);
+
+  nsTArray<const TimeIntervals*> tracks;
+  if (HasVideo()) {
+    tracks.AppendElement(&mVideoBufferedRanges);
+  }
+  if (HasAudio()) {
+    tracks.AppendElement(&mAudioBufferedRanges);
+  }
+  return HighestEndTime(tracks);
+}
+
+// Locked helper: maximum GetEnd() over the supplied range lists. Caller must
+// already hold mMutex (asserted).
+TimeUnit TrackBuffersManager::HighestEndTime(
+    nsTArray<const TimeIntervals*>& aTracks) const {
+  mMutex.AssertCurrentThreadOwns();
+
+  TimeUnit highestEndTime;
+
+  for (const auto& trackRanges : aTracks) {
+    highestEndTime = std::max(trackRanges->GetEnd(), highestEndTime);
+  }
+  return highestEndTime;
+}
+
+// Clears a track's eviction bookkeeping (last keyframe index and evictable
+// byte count). mMutex is held because mEvictable is read off the task queue
+// (see Evictable()).
+void TrackBuffersManager::ResetEvictionIndex(TrackData& aTrackData) {
+  MutexAutoLock mut(mMutex);
+  aTrackData.mEvictionIndex.Reset();
+}
+
+// Advances the eviction index to currentIndex (which must be a keyframe or
+// one-past-the-end) and adds the byte size of the samples passed over to the
+// track's evictable total.
+void TrackBuffersManager::UpdateEvictionIndex(TrackData& aTrackData,
+                                              uint32_t currentIndex) {
+  uint32_t evictable = 0;
+  TrackBuffer& data = aTrackData.GetTrackBuffer();
+  MOZ_DIAGNOSTIC_ASSERT(currentIndex >= aTrackData.mEvictionIndex.mLastIndex,
+                        "Invalid call");
+  MOZ_DIAGNOSTIC_ASSERT(
+      currentIndex == data.Length() || data[currentIndex]->mKeyframe,
+      "Must stop at keyframe");
+
+  // Sum the sizes of the samples between the previously recorded position
+  // and the new one; these are now behind the demux position and evictable.
+  for (uint32_t i = aTrackData.mEvictionIndex.mLastIndex; i < currentIndex;
+       i++) {
+    evictable += data[i]->ComputedSizeOfIncludingThis();
+  }
+  aTrackData.mEvictionIndex.mLastIndex = currentIndex;
+  // Only mEvictable is read cross-thread (under mMutex, see Evictable()),
+  // so the lock is taken just for the final accumulation.
+  MutexAutoLock mut(mMutex);
+  aTrackData.mEvictionIndex.mEvictable += evictable;
+}
+
+// Returns the current (last) sample buffer of a track. Task-queue only.
+const TrackBuffersManager::TrackBuffer& TrackBuffersManager::GetTrackBuffer(
+    TrackInfo::TrackType aTrack) const {
+  MOZ_ASSERT(OnTaskQueue());
+  return GetTracksData(aTrack).GetTrackBuffer();
+}
+
+// Returns the index of the first sample that starts at, or spans past,
+// aInterval.mStart - aInterval.mFuzz. The caller must only pass an interval
+// known to intersect the buffer; if nothing matches we assert (debug) and
+// fall back to index 0.
+uint32_t TrackBuffersManager::FindSampleIndex(const TrackBuffer& aTrackBuffer,
+                                              const TimeInterval& aInterval) {
+  TimeUnit target = aInterval.mStart - aInterval.mFuzz;
+
+  for (uint32_t i = 0; i < aTrackBuffer.Length(); i++) {
+    const RefPtr<MediaRawData>& sample = aTrackBuffer[i];
+    if (sample->mTime >= target || sample->GetEndTime() > target) {
+      return i;
+    }
+  }
+  NS_ASSERTION(false, "FindSampleIndex called with invalid arguments");
+
+  return 0;
+}
+
+// Positions the demux cursor of aTrack at the last keyframe at or before
+// aTime (within the buffered interval containing aTime) and returns that
+// keyframe's time. An empty track resets the cursor to the start.
+TimeUnit TrackBuffersManager::Seek(TrackInfo::TrackType aTrack,
+                                   const TimeUnit& aTime,
+                                   const TimeUnit& aFuzz) {
+  MOZ_ASSERT(OnTaskQueue());
+  AUTO_PROFILER_LABEL("TrackBuffersManager::Seek", MEDIA_PLAYBACK);
+  auto& trackBuffer = GetTracksData(aTrack);
+  const TrackBuffersManager::TrackBuffer& track = GetTrackBuffer(aTrack);
+
+  if (!track.Length()) {
+    // This a reset. It will be followed by another valid seek.
+    trackBuffer.mNextGetSampleIndex = Some(uint32_t(0));
+    trackBuffer.mNextSampleTimecode = TimeUnit();
+    trackBuffer.mNextSampleTime = TimeUnit();
+    ResetEvictionIndex(trackBuffer);
+    return TimeUnit();
+  }
+
+  uint32_t i = 0;
+
+  if (aTime != TimeUnit()) {
+    // Determine the interval of samples we're attempting to seek to.
+    TimeIntervals buffered = trackBuffer.mBufferedRanges;
+    // Fuzz factor is +/- aFuzz; as we want to only eliminate gaps
+    // that are less than aFuzz wide, we set a fuzz factor aFuzz/2.
+    buffered.SetFuzz(aFuzz / 2);
+    TimeIntervals::IndexType index = buffered.Find(aTime);
+    MOZ_ASSERT(index != TimeIntervals::NoIndex,
+               "We shouldn't be called if aTime isn't buffered");
+    TimeInterval target = buffered[index];
+    target.mFuzz = aFuzz;
+    i = FindSampleIndex(track, target);
+  }
+
+  // Scan forward, remembering the last keyframe seen, until we pass aTime.
+  Maybe<TimeUnit> lastKeyFrameTime;
+  TimeUnit lastKeyFrameTimecode;
+  uint32_t lastKeyFrameIndex = 0;
+  for (; i < track.Length(); i++) {
+    const RefPtr<MediaRawData>& sample = track[i];
+    TimeUnit sampleTime = sample->mTime;
+    // Stop once we are past aTime with a keyframe already in hand.
+    if (sampleTime > aTime && lastKeyFrameTime.isSome()) {
+      break;
+    }
+    if (sample->mKeyframe) {
+      lastKeyFrameTimecode = sample->mTimecode;
+      lastKeyFrameTime = Some(sampleTime);
+      lastKeyFrameIndex = i;
+    }
+    // Note: not redundant with the check above — this sample may itself be
+    // the keyframe that just set lastKeyFrameTime, or match aTime exactly.
+    if (sampleTime == aTime ||
+        (sampleTime > aTime && lastKeyFrameTime.isSome())) {
+      break;
+    }
+  }
+  MSE_DEBUG("Keyframe %s found at %" PRId64 " @ %u",
+            lastKeyFrameTime.isSome() ? "" : "not",
+            lastKeyFrameTime.refOr(TimeUnit()).ToMicroseconds(),
+            lastKeyFrameIndex);
+
+  trackBuffer.mNextGetSampleIndex = Some(lastKeyFrameIndex);
+  trackBuffer.mNextSampleTimecode = lastKeyFrameTimecode;
+  trackBuffer.mNextSampleTime = lastKeyFrameTime.refOr(TimeUnit());
+  ResetEvictionIndex(trackBuffer);
+  UpdateEvictionIndex(trackBuffer, lastKeyFrameIndex);
+
+  return lastKeyFrameTime.refOr(TimeUnit());
+}
+
+// Advances the demux cursor to the next keyframe at or past aTimeThreadshold,
+// walking only through contiguous samples. Returns the number of samples
+// skipped and sets aFound. If no such keyframe is reachable, the cursor is
+// rewound to the nearest preceding keyframe so the next demux can decode.
+uint32_t TrackBuffersManager::SkipToNextRandomAccessPoint(
+    TrackInfo::TrackType aTrack, const TimeUnit& aTimeThreadshold,
+    const media::TimeUnit& aFuzz, bool& aFound) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  AUTO_PROFILER_LABEL("TrackBuffersManager::SkipToNextRandomAccessPoint",
+                      MEDIA_PLAYBACK);
+  uint32_t parsed = 0;
+  auto& trackData = GetTracksData(aTrack);
+  const TrackBuffer& track = GetTrackBuffer(aTrack);
+  aFound = false;
+
+  // SkipToNextRandomAccessPoint can only be called if aTimeThreadshold is known
+  // to be buffered.
+
+  if (NS_FAILED(SetNextGetSampleIndexIfNeeded(aTrack, aFuzz))) {
+    return 0;
+  }
+
+  TimeUnit nextSampleTimecode = trackData.mNextSampleTimecode;
+  TimeUnit nextSampleTime = trackData.mNextSampleTime;
+  uint32_t i = trackData.mNextGetSampleIndex.ref();
+  int32_t originalPos = i;
+
+  // Walk forward while samples remain contiguous (GetSample returns null on
+  // a gap or at the end), counting skipped samples.
+  for (; i < track.Length(); i++) {
+    const MediaRawData* sample =
+        GetSample(aTrack, i, nextSampleTimecode, nextSampleTime, aFuzz);
+    if (!sample) {
+      break;
+    }
+    if (sample->mKeyframe && sample->mTime >= aTimeThreadshold) {
+      aFound = true;
+      break;
+    }
+    nextSampleTimecode = sample->GetEndTimecode();
+    nextSampleTime = sample->GetEndTime();
+    parsed++;
+  }
+
+  // Adjust the next demux time and index so that the next call to
+  // SkipToNextRandomAccessPoint will not count again the parsed sample as
+  // skipped.
+  if (aFound) {
+    trackData.mNextSampleTimecode = track[i]->mTimecode;
+    trackData.mNextSampleTime = track[i]->mTime;
+    trackData.mNextGetSampleIndex = Some(i);
+  } else if (i > 0) {
+    // Go back to the previous keyframe or the original position so the next
+    // demux can succeed and be decoded.
+    for (int j = i - 1; j >= originalPos; j--) {
+      const RefPtr<MediaRawData>& sample = track[j];
+      if (sample->mKeyframe) {
+        trackData.mNextSampleTimecode = sample->mTimecode;
+        trackData.mNextSampleTime = sample->mTime;
+        trackData.mNextGetSampleIndex = Some(uint32_t(j));
+        // We are unable to skip to a keyframe past aTimeThreshold, however
+        // we are speeding up decoding by dropping the unplayable frames.
+        // So we can mark aFound as true.
+        aFound = true;
+        break;
+      }
+      parsed--;
+    }
+  }
+
+  if (aFound) {
+    UpdateEvictionIndex(trackData, trackData.mNextGetSampleIndex.ref());
+  }
+
+  return parsed;
+}
+
+// Returns the sample at aIndex if it is contiguous with the expected
+// decode/presentation timestamps (within aFuzz), or null when past the end,
+// on timestamp overflow, or across a gap. Index 0 is always accepted.
+const MediaRawData* TrackBuffersManager::GetSample(TrackInfo::TrackType aTrack,
+                                                   uint32_t aIndex,
+                                                   const TimeUnit& aExpectedDts,
+                                                   const TimeUnit& aExpectedPts,
+                                                   const TimeUnit& aFuzz) {
+  MOZ_ASSERT(OnTaskQueue());
+  const TrackBuffer& track = GetTrackBuffer(aTrack);
+
+  if (aIndex >= track.Length()) {
+    // reached the end.
+    return nullptr;
+  }
+
+  if (!(aExpectedDts + aFuzz).IsValid() || !(aExpectedPts + aFuzz).IsValid()) {
+    // Time overflow, it seems like we also reached the end.
+    return nullptr;
+  }
+
+  const RefPtr<MediaRawData>& sample = track[aIndex];
+  if (!aIndex || sample->mTimecode <= aExpectedDts + aFuzz ||
+      sample->mTime <= aExpectedPts + aFuzz) {
+    MOZ_DIAGNOSTIC_ASSERT(sample->HasValidTime());
+    return sample;
+  }
+
+  // Gap is too big. End of Stream or Waiting for Data.
+  // TODO, check that we have continuous data based on the sanitized buffered
+  // range instead.
+  return nullptr;
+}
+
+// Demuxes the next sample of aTrack: clones it, advances the cursor, and
+// estimates the following sample's timestamps. aResult is NS_OK on success,
+// END_OF_STREAM past the buffer, WAITING_FOR_DATA across a gap, or OOM if
+// the clone fails.
+already_AddRefed<MediaRawData> TrackBuffersManager::GetSample(
+    TrackInfo::TrackType aTrack, const TimeUnit& aFuzz, MediaResult& aResult) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+  AUTO_PROFILER_LABEL("TrackBuffersManager::GetSample", MEDIA_PLAYBACK);
+  auto& trackData = GetTracksData(aTrack);
+  const TrackBuffer& track = GetTrackBuffer(aTrack);
+
+  aResult = NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA;
+
+  // Fast path: the cursor index is already known.
+  if (trackData.mNextGetSampleIndex.isSome()) {
+    if (trackData.mNextGetSampleIndex.ref() >= track.Length()) {
+      aResult = NS_ERROR_DOM_MEDIA_END_OF_STREAM;
+      return nullptr;
+    }
+    const MediaRawData* sample = GetSample(
+        aTrack, trackData.mNextGetSampleIndex.ref(),
+        trackData.mNextSampleTimecode, trackData.mNextSampleTime, aFuzz);
+    if (!sample) {
+      return nullptr;
+    }
+
+    RefPtr<MediaRawData> p = sample->Clone();
+    if (!p) {
+      aResult = MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
+      return nullptr;
+    }
+    if (p->mKeyframe) {
+      UpdateEvictionIndex(trackData, trackData.mNextGetSampleIndex.ref());
+    }
+    trackData.mNextGetSampleIndex.ref()++;
+    // Estimate decode timestamp and timestamp of the next sample.
+    TimeUnit nextSampleTimecode = sample->GetEndTimecode();
+    TimeUnit nextSampleTime = sample->GetEndTime();
+    const MediaRawData* nextSample =
+        GetSample(aTrack, trackData.mNextGetSampleIndex.ref(),
+                  nextSampleTimecode, nextSampleTime, aFuzz);
+    if (nextSample) {
+      // We have a valid next sample, can use exact values.
+      trackData.mNextSampleTimecode = nextSample->mTimecode;
+      trackData.mNextSampleTime = nextSample->mTime;
+    } else {
+      // Next sample isn't available yet. Use estimates.
+      trackData.mNextSampleTimecode = nextSampleTimecode;
+      trackData.mNextSampleTime = nextSampleTime;
+    }
+    aResult = NS_OK;
+    return p.forget();
+  }
+
+  // Slow path: locate the cursor from mNextSampleTimecode first.
+  aResult = SetNextGetSampleIndexIfNeeded(aTrack, aFuzz);
+
+  if (NS_FAILED(aResult)) {
+    return nullptr;
+  }
+
+  MOZ_RELEASE_ASSERT(trackData.mNextGetSampleIndex.isSome() &&
+                     trackData.mNextGetSampleIndex.ref() < track.Length());
+  const RefPtr<MediaRawData>& sample =
+      track[trackData.mNextGetSampleIndex.ref()];
+  RefPtr<MediaRawData> p = sample->Clone();
+  if (!p) {
+    // OOM
+    aResult = MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
+    return nullptr;
+  }
+  MOZ_DIAGNOSTIC_ASSERT(p->HasValidTime());
+
+  // Find the previous keyframe to calculate the evictable amount.
+  // NOTE(review): assumes a keyframe exists at or before this index (the
+  // unsigned loop counter would wrap otherwise) — presumably guaranteed by
+  // coded frame processing; confirm.
+  uint32_t i = trackData.mNextGetSampleIndex.ref();
+  for (; !track[i]->mKeyframe; i--) {
+  }
+  UpdateEvictionIndex(trackData, i);
+
+  trackData.mNextGetSampleIndex.ref()++;
+  trackData.mNextSampleTimecode = sample->GetEndTimecode();
+  trackData.mNextSampleTime = sample->GetEndTime();
+  return p.forget();
+}
+
+// Locates the sample index matching mNextSampleTimecode using three passes of
+// decreasing strictness: exact decode-timestamp match, fuzzed decode-timestamp
+// match, then fuzzed presentation-timestamp match. Returns -1 if not found.
+int32_t TrackBuffersManager::FindCurrentPosition(TrackInfo::TrackType aTrack,
+                                                 const TimeUnit& aFuzz) const {
+  MOZ_ASSERT(OnTaskQueue());
+  auto& trackData = GetTracksData(aTrack);
+  const TrackBuffer& track = GetTrackBuffer(aTrack);
+
+  // Perform an exact search first.
+  for (uint32_t i = 0; i < track.Length(); i++) {
+    const RefPtr<MediaRawData>& sample = track[i];
+    TimeInterval sampleInterval{sample->mTimecode, sample->GetEndTimecode()};
+
+    if (sampleInterval.ContainsStrict(trackData.mNextSampleTimecode)) {
+      return i;
+    }
+    if (sampleInterval.mStart > trackData.mNextSampleTimecode) {
+      // Samples are ordered by timecode. There's no need to search
+      // any further.
+      break;
+    }
+  }
+
+  // Second pass: same search but tolerating a gap of up to aFuzz.
+  for (uint32_t i = 0; i < track.Length(); i++) {
+    const RefPtr<MediaRawData>& sample = track[i];
+    TimeInterval sampleInterval{sample->mTimecode, sample->GetEndTimecode(),
+                                aFuzz};
+
+    if (sampleInterval.ContainsWithStrictEnd(trackData.mNextSampleTimecode)) {
+      return i;
+    }
+    if (sampleInterval.mStart - aFuzz > trackData.mNextSampleTimecode) {
+      // Samples are ordered by timecode. There's no need to search
+      // any further.
+      break;
+    }
+  }
+
+  // We couldn't find our sample by decode timestamp. Attempt to find it using
+  // presentation timestamp. There will likely be small jerkiness.
+  for (uint32_t i = 0; i < track.Length(); i++) {
+    const RefPtr<MediaRawData>& sample = track[i];
+    TimeInterval sampleInterval{sample->mTime, sample->GetEndTime(), aFuzz};
+
+    if (sampleInterval.ContainsWithStrictEnd(trackData.mNextSampleTimecode)) {
+      return i;
+    }
+  }
+
+  // Still not found.
+  return -1;
+}
+
+// Thread-safe (mMutex-guarded) read of the track's evictable byte count
+// maintained by UpdateEvictionIndex/ResetEvictionIndex.
+uint32_t TrackBuffersManager::Evictable(TrackInfo::TrackType aTrack) const {
+  MutexAutoLock mut(mMutex);
+  return GetTracksData(aTrack).mEvictionIndex.mEvictable;
+}
+
+// Returns the presentation time of the next keyframe reachable from the
+// current demux position through contiguous samples, or +infinity when none
+// is reachable. Does not move the cursor.
+TimeUnit TrackBuffersManager::GetNextRandomAccessPoint(
+    TrackInfo::TrackType aTrack, const TimeUnit& aFuzz) {
+  mTaskQueueCapability->AssertOnCurrentThread();
+
+  // So first determine the current position in the track buffer if necessary.
+  if (NS_FAILED(SetNextGetSampleIndexIfNeeded(aTrack, aFuzz))) {
+    return TimeUnit::FromInfinity();
+  }
+
+  auto& trackData = GetTracksData(aTrack);
+  const TrackBuffersManager::TrackBuffer& track = GetTrackBuffer(aTrack);
+
+  uint32_t i = trackData.mNextGetSampleIndex.ref();
+  TimeUnit nextSampleTimecode = trackData.mNextSampleTimecode;
+  TimeUnit nextSampleTime = trackData.mNextSampleTime;
+
+  // Walk forward while samples stay contiguous (GetSample returns null on a
+  // gap or at the end).
+  for (; i < track.Length(); i++) {
+    const MediaRawData* sample =
+        GetSample(aTrack, i, nextSampleTimecode, nextSampleTime, aFuzz);
+    if (!sample) {
+      break;
+    }
+    if (sample->mKeyframe) {
+      return sample->mTime;
+    }
+    nextSampleTimecode = sample->GetEndTimecode();
+    nextSampleTime = sample->GetEndTime();
+  }
+  return TimeUnit::FromInfinity();
+}
+
+// Ensures mNextGetSampleIndex is set, deriving it from mNextSampleTimecode
+// via FindCurrentPosition when needed. Returns NS_OK when positioned,
+// END_OF_STREAM when the buffer is empty or exhausted, WAITING_FOR_DATA when
+// the expected timecode isn't buffered yet.
+nsresult TrackBuffersManager::SetNextGetSampleIndexIfNeeded(
+    TrackInfo::TrackType aTrack, const TimeUnit& aFuzz) {
+  MOZ_ASSERT(OnTaskQueue());
+  auto& trackData = GetTracksData(aTrack);
+  const TrackBuffer& track = GetTrackBuffer(aTrack);
+
+  if (trackData.mNextGetSampleIndex.isSome()) {
+    // We already know the next GetSample index.
+    return NS_OK;
+  }
+
+  if (!track.Length()) {
+    // There's nothing to find yet.
+    return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
+  }
+
+  if (trackData.mNextSampleTimecode == TimeUnit()) {
+    // First demux, get first sample.
+    trackData.mNextGetSampleIndex = Some(0u);
+    return NS_OK;
+  }
+
+  if (trackData.mNextSampleTimecode > track.LastElement()->GetEndTimecode()) {
+    // The next element is past our last sample. We're done.
+    trackData.mNextGetSampleIndex = Some(uint32_t(track.Length()));
+    return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
+  }
+
+  int32_t pos = FindCurrentPosition(aTrack, aFuzz);
+  if (pos < 0) {
+    // Not found, must wait for more data.
+    MSE_DEBUG("Couldn't find sample (pts:%" PRId64 " dts:%" PRId64 ")",
+              trackData.mNextSampleTime.ToMicroseconds(),
+              trackData.mNextSampleTimecode.ToMicroseconds());
+    return NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA;
+  }
+  trackData.mNextGetSampleIndex = Some(uint32_t(pos));
+  return NS_OK;
+}
+
+// Accumulates the heap size of every sample in every buffer of this track
+// into the caller-provided ResourceSizes tally.
+void TrackBuffersManager::TrackData::AddSizeOfResources(
+    MediaSourceDecoder::ResourceSizes* aSizes) const {
+  for (const TrackBuffer& buffer : mBuffers) {
+    for (const MediaRawData* data : buffer) {
+      aSizes->mByteSize += data->SizeOfIncludingThis(aSizes->mMallocSizeOf);
+    }
+  }
+}
+
+// Fills aInfo with debug state, bouncing to the task queue when called from
+// another thread. aInfo is captured by reference, so the caller must keep it
+// alive until the returned promise settles.
+RefPtr<GenericPromise> TrackBuffersManager::RequestDebugInfo(
+    dom::TrackBuffersManagerDebugInfo& aInfo) const {
+  const RefPtr<TaskQueue> taskQueue = GetTaskQueueSafe();
+  if (!taskQueue) {
+    // No task queue anymore (detached); nothing to report.
+    return GenericPromise::CreateAndResolve(true, __func__);
+  }
+  if (!taskQueue->IsCurrentThreadIn()) {
+    // Run the request on the task queue if it's not already.
+    return InvokeAsync(taskQueue.get(), __func__,
+                       [this, self = RefPtr{this}, &aInfo] {
+                         return RequestDebugInfo(aInfo);
+                       });
+  }
+  mTaskQueueCapability->AssertOnCurrentThread();
+  GetDebugInfo(aInfo);
+  return GenericPromise::CreateAndResolve(true, __func__);
+}
+
+// Populates aInfo with the state of one track — audio when present,
+// otherwise video — including its sample counts, cursor indices and
+// buffered ranges (copied via SafeBuffered).
+void TrackBuffersManager::GetDebugInfo(
+    dom::TrackBuffersManagerDebugInfo& aInfo) const {
+  MOZ_ASSERT(OnTaskQueue(),
+             "This shouldn't be called off the task queue because we're about "
+             "to touch a lot of data that is used on the task queue");
+  CopyUTF8toUTF16(mType.Type().AsString(), aInfo.mType);
+
+  if (HasAudio()) {
+    aInfo.mNextSampleTime = mAudioTracks.mNextSampleTime.ToSeconds();
+    aInfo.mNumSamples = mAudioTracks.mBuffers[0].Length();
+    aInfo.mBufferSize = mAudioTracks.mSizeBuffer;
+    aInfo.mEvictable = Evictable(TrackInfo::kAudioTrack);
+    aInfo.mNextGetSampleIndex = mAudioTracks.mNextGetSampleIndex.valueOr(-1);
+    aInfo.mNextInsertionIndex = mAudioTracks.mNextInsertionIndex.valueOr(-1);
+    media::TimeIntervals ranges = SafeBuffered(TrackInfo::kAudioTrack);
+    dom::Sequence<dom::BufferRange> items;
+    for (uint32_t i = 0; i < ranges.Length(); ++i) {
+      // dom::Sequence is a FallibleTArray
+      dom::BufferRange* range = items.AppendElement(fallible);
+      if (!range) {
+        break;
+      }
+      range->mStart = ranges.Start(i).ToSeconds();
+      range->mEnd = ranges.End(i).ToSeconds();
+    }
+    aInfo.mRanges = std::move(items);
+  } else if (HasVideo()) {
+    aInfo.mNextSampleTime = mVideoTracks.mNextSampleTime.ToSeconds();
+    aInfo.mNumSamples = mVideoTracks.mBuffers[0].Length();
+    aInfo.mBufferSize = mVideoTracks.mSizeBuffer;
+    aInfo.mEvictable = Evictable(TrackInfo::kVideoTrack);
+    aInfo.mNextGetSampleIndex = mVideoTracks.mNextGetSampleIndex.valueOr(-1);
+    aInfo.mNextInsertionIndex = mVideoTracks.mNextInsertionIndex.valueOr(-1);
+    media::TimeIntervals ranges = SafeBuffered(TrackInfo::kVideoTrack);
+    dom::Sequence<dom::BufferRange> items;
+    for (uint32_t i = 0; i < ranges.Length(); ++i) {
+      // dom::Sequence is a FallibleTArray
+      dom::BufferRange* range = items.AppendElement(fallible);
+      if (!range) {
+        break;
+      }
+      range->mStart = ranges.Start(i).ToSeconds();
+      range->mEnd = ranges.End(i).ToSeconds();
+    }
+    aInfo.mRanges = std::move(items);
+  }
+}
+
+// Accounts the memory held by the input/init/pending buffers and both
+// tracks' sample buffers into aSizes.
+void TrackBuffersManager::AddSizeOfResources(
+    MediaSourceDecoder::ResourceSizes* aSizes) const {
+  mTaskQueueCapability->AssertOnCurrentThread();
+
+  if (mInputBuffer.isSome() && mInputBuffer->Buffer()) {
+    // mInputBuffer should be the sole owner of the underlying buffer, so this
+    // won't double count.
+    aSizes->mByteSize += mInputBuffer->Buffer()->ShallowSizeOfIncludingThis(
+        aSizes->mMallocSizeOf);
+  }
+  if (mInitData) {
+    aSizes->mByteSize +=
+        mInitData->ShallowSizeOfIncludingThis(aSizes->mMallocSizeOf);
+  }
+  if (mPendingInputBuffer.isSome() && mPendingInputBuffer->Buffer()) {
+    // mPendingInputBuffer should be the sole owner of the underlying buffer, so
+    // this won't double count.
+    aSizes->mByteSize +=
+        mPendingInputBuffer->Buffer()->ShallowSizeOfIncludingThis(
+            aSizes->mMallocSizeOf);
+  }
+
+  mVideoTracks.AddSizeOfResources(aSizes);
+  mAudioTracks.AddSizeOfResources(aSizes);
+}
+
+} // namespace mozilla
+#undef MSE_DEBUG
+#undef MSE_DEBUGV
+#undef SAMPLE_DEBUG
diff --git a/dom/media/mediasource/TrackBuffersManager.h b/dom/media/mediasource/TrackBuffersManager.h
new file mode 100644
index 0000000000..0b3b64b8fe
--- /dev/null
+++ b/dom/media/mediasource/TrackBuffersManager.h
@@ -0,0 +1,568 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_TRACKBUFFERSMANAGER_H_
+#define MOZILLA_TRACKBUFFERSMANAGER_H_
+
+#include "mozilla/Atomics.h"
+#include "mozilla/EventTargetCapability.h"
+#include "mozilla/Maybe.h"
+#include "mozilla/Mutex.h"
+#include "mozilla/NotNull.h"
+#include "mozilla/TaskQueue.h"
+#include "mozilla/dom/MediaDebugInfoBinding.h"
+
+#include "MediaContainerType.h"
+#include "MediaData.h"
+#include "MediaDataDemuxer.h"
+#include "MediaResult.h"
+#include "MediaSourceDecoder.h"
+#include "MediaSpan.h"
+#include "SourceBufferTask.h"
+#include "TimeUnits.h"
+#include "nsTArray.h"
+
+namespace mozilla {
+
+class AbstractThread;
+class ContainerParser;
+class MediaByteBuffer;
+class MediaRawData;
+class MediaSourceDemuxer;
+class SourceBufferResource;
+
+// Simple FIFO of pending SourceBufferTask objects. Asserts on destruction
+// that every queued task was consumed.
+class SourceBufferTaskQueue {
+ public:
+  SourceBufferTaskQueue() = default;
+
+  ~SourceBufferTaskQueue() {
+    MOZ_ASSERT(mQueue.IsEmpty(), "All tasks must have been processed");
+  }
+
+  // Append a task to the back of the queue (takes a reference).
+  void Push(SourceBufferTask* aTask) { mQueue.AppendElement(aTask); }
+
+  // Remove and return the oldest task, or null when the queue is empty.
+  already_AddRefed<SourceBufferTask> Pop() {
+    if (!mQueue.Length()) {
+      return nullptr;
+    }
+    RefPtr<SourceBufferTask> task = std::move(mQueue[0]);
+    mQueue.RemoveElementAt(0);
+    return task.forget();
+  }
+
+  // Number of tasks currently queued.
+  nsTArray<RefPtr<SourceBufferTask>>::size_type Length() const {
+    return mQueue.Length();
+  }
+
+ private:
+  nsTArray<RefPtr<SourceBufferTask>> mQueue;
+};
+
+DDLoggedTypeDeclName(TrackBuffersManager);
+
+class TrackBuffersManager final
+ : public DecoderDoctorLifeLogger<TrackBuffersManager> {
+ public:
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackBuffersManager);
+
+ enum class EvictDataResult : int8_t {
+ NO_DATA_EVICTED,
+ CANT_EVICT,
+ BUFFER_FULL,
+ };
+
+ typedef TrackInfo::TrackType TrackType;
+ typedef MediaData::Type MediaType;
+ typedef nsTArray<RefPtr<MediaRawData>> TrackBuffer;
+ typedef SourceBufferTask::AppendPromise AppendPromise;
+ typedef SourceBufferTask::RangeRemovalPromise RangeRemovalPromise;
+
+ // Interface for SourceBuffer
+ TrackBuffersManager(MediaSourceDecoder* aParentDecoder,
+ const MediaContainerType& aType);
+
+ // Queue a task to add data to the end of the input buffer and run the MSE
+ // Buffer Append Algorithm
+ // 3.5.5 Buffer Append Algorithm.
+ // http://w3c.github.io/media-source/index.html#sourcebuffer-buffer-append
+ RefPtr<AppendPromise> AppendData(already_AddRefed<MediaByteBuffer> aData,
+ const SourceBufferAttributes& aAttributes);
+
+ // Queue a task to abort any pending AppendData.
+ // Does nothing at this stage.
+ void AbortAppendData();
+
+ // Queue a task to run MSE Reset Parser State Algorithm.
+ // 3.5.2 Reset Parser State
+ void ResetParserState(SourceBufferAttributes& aAttributes);
+
+ // Queue a task to run the MSE range removal algorithm.
+ // http://w3c.github.io/media-source/#sourcebuffer-coded-frame-removal
+ RefPtr<RangeRemovalPromise> RangeRemoval(media::TimeUnit aStart,
+ media::TimeUnit aEnd);
+
+ // Schedule data eviction if necessary as the next call to AppendData will
+ // add aSize bytes.
+ // Eviction is done in two steps, first remove data up to aPlaybackTime
+ // and if still more space is needed remove from the end.
+ EvictDataResult EvictData(const media::TimeUnit& aPlaybackTime,
+ int64_t aSize);
+
+ // Queue a task to run ChangeType
+ void ChangeType(const MediaContainerType& aType);
+
+ // Returns the buffered range currently managed.
+ // This may be called on any thread.
+ // Buffered must conform to
+ // http://w3c.github.io/media-source/index.html#widl-SourceBuffer-buffered
+ media::TimeIntervals Buffered() const;
+ media::TimeUnit HighestStartTime() const;
+ media::TimeUnit HighestEndTime() const;
+
+ // Return the size of the data managed by this SourceBufferContentManager.
+ int64_t GetSize() const;
+
+ // Indicate that the MediaSource parent object got into "ended" state.
+ void Ended();
+
+ // The parent SourceBuffer is about to be destroyed.
+ void Detach();
+
+ int64_t EvictionThreshold() const;
+
+ // Interface for MediaSourceDemuxer
+ MediaInfo GetMetadata() const;
+ const TrackBuffer& GetTrackBuffer(TrackInfo::TrackType aTrack) const;
+ const media::TimeIntervals& Buffered(TrackInfo::TrackType) const;
+ const media::TimeUnit& HighestStartTime(TrackInfo::TrackType) const;
+ media::TimeIntervals SafeBuffered(TrackInfo::TrackType) const;
+ bool IsEnded() const { return mEnded; }
+ uint32_t Evictable(TrackInfo::TrackType aTrack) const;
+ media::TimeUnit Seek(TrackInfo::TrackType aTrack,
+ const media::TimeUnit& aTime,
+ const media::TimeUnit& aFuzz);
+ uint32_t SkipToNextRandomAccessPoint(TrackInfo::TrackType aTrack,
+ const media::TimeUnit& aTimeThreadshold,
+ const media::TimeUnit& aFuzz,
+ bool& aFound);
+
+ already_AddRefed<MediaRawData> GetSample(TrackInfo::TrackType aTrack,
+ const media::TimeUnit& aFuzz,
+ MediaResult& aResult);
+ int32_t FindCurrentPosition(TrackInfo::TrackType aTrack,
+ const media::TimeUnit& aFuzz) const
+ MOZ_REQUIRES(mTaskQueueCapability);
+
+ // Will set the next GetSample index if needed. This information is determined
+ // through the value of mNextSampleTimecode. Return false if the index
+ // couldn't be determined or if there's nothing more that could be demuxed.
+ // This occurs if either the track buffer doesn't contain the required
+ // timecode or is empty.
+ nsresult SetNextGetSampleIndexIfNeeded(TrackInfo::TrackType aTrack,
+ const media::TimeUnit& aFuzz)
+ MOZ_REQUIRES(mTaskQueueCapability);
+
+ media::TimeUnit GetNextRandomAccessPoint(TrackInfo::TrackType aTrack,
+ const media::TimeUnit& aFuzz);
+
+ // Requests that the TrackBuffersManager populates aInfo with debug
+ // information. This may be done asynchronously, and aInfo should *not* be
+ // accessed by the caller until the returned promise is resolved or rejected.
+ RefPtr<GenericPromise> RequestDebugInfo(
+ dom::TrackBuffersManagerDebugInfo& aInfo) const;
+ void AddSizeOfResources(MediaSourceDecoder::ResourceSizes* aSizes) const;
+
+ private:
+ typedef MozPromise<bool, MediaResult, /* IsExclusive = */ true>
+ CodedFrameProcessingPromise;
+
+ ~TrackBuffersManager();
+ // All following functions run on the taskqueue.
+ RefPtr<AppendPromise> DoAppendData(already_AddRefed<MediaByteBuffer> aData,
+ const SourceBufferAttributes& aAttributes);
+ void ScheduleSegmentParserLoop() MOZ_REQUIRES(mTaskQueueCapability);
+ void SegmentParserLoop() MOZ_REQUIRES(mTaskQueueCapability);
+ void InitializationSegmentReceived() MOZ_REQUIRES(mTaskQueueCapability);
+ void ShutdownDemuxers() MOZ_REQUIRES(mTaskQueueCapability);
+ void CreateDemuxerforMIMEType() MOZ_REQUIRES(mTaskQueueCapability);
+ void ResetDemuxingState() MOZ_REQUIRES(mTaskQueueCapability);
+ void NeedMoreData() MOZ_REQUIRES(mTaskQueueCapability);
+ void RejectAppend(const MediaResult& aRejectValue, const char* aName)
+ MOZ_REQUIRES(mTaskQueueCapability);
+ // Will return a promise that will be resolved once all frames of the current
+ // media segment have been processed.
+ RefPtr<CodedFrameProcessingPromise> CodedFrameProcessing()
+ MOZ_REQUIRES(mTaskQueueCapability);
+ void CompleteCodedFrameProcessing() MOZ_REQUIRES(mTaskQueueCapability);
+ // Called by ResetParserState.
+ void CompleteResetParserState() MOZ_REQUIRES(mTaskQueueCapability);
+ RefPtr<RangeRemovalPromise> CodedFrameRemovalWithPromise(
+ media::TimeInterval aInterval) MOZ_REQUIRES(mTaskQueueCapability);
+ bool CodedFrameRemoval(media::TimeInterval aInterval)
+ MOZ_REQUIRES(mTaskQueueCapability);
+ // Removes all coded frames -- this is not to spec and should be used as a
+ // last resort to clear buffers only if other methods cannot.
+ void RemoveAllCodedFrames() MOZ_REQUIRES(mTaskQueueCapability);
+ void SetAppendState(SourceBufferAttributes::AppendState aAppendState)
+ MOZ_REQUIRES(mTaskQueueCapability);
+
+ bool HasVideo() const { return mVideoTracks.mNumTracks > 0; }
+ bool HasAudio() const { return mAudioTracks.mNumTracks > 0; }
+
+ // The input buffer as per
+ // http://w3c.github.io/media-source/index.html#sourcebuffer-input-buffer
+ Maybe<MediaSpan> mInputBuffer MOZ_GUARDED_BY(mTaskQueueCapability);
+ // Buffer full flag as per
+ // https://w3c.github.io/media-source/#sourcebuffer-buffer-full-flag. Accessed
+ // on both the main thread and the task queue.
+ Atomic<bool> mBufferFull;
+ bool mFirstInitializationSegmentReceived MOZ_GUARDED_BY(mTaskQueueCapability);
+ bool mChangeTypeReceived MOZ_GUARDED_BY(mTaskQueueCapability);
+ // Set to true once a new segment is started.
+ bool mNewMediaSegmentStarted MOZ_GUARDED_BY(mTaskQueueCapability);
+ bool mActiveTrack MOZ_GUARDED_BY(mTaskQueueCapability);
+ MediaContainerType mType MOZ_GUARDED_BY(mTaskQueueCapability);
+
+ // ContainerParser objects and methods.
+ // Those are used to parse the incoming input buffer.
+
+ // Recreate the ContainerParser and if aReuseInitData is true then
+ // feed it with the previous init segment found.
+ void RecreateParser(bool aReuseInitData) MOZ_REQUIRES(mTaskQueueCapability);
+ UniquePtr<ContainerParser> mParser;
+
+ // Demuxer objects and methods.
+ void AppendDataToCurrentInputBuffer(const MediaSpan& aData)
+ MOZ_REQUIRES(mTaskQueueCapability);
+
+ RefPtr<MediaByteBuffer> mInitData MOZ_GUARDED_BY(mTaskQueueCapability);
+
+ // Checks if a new set of init data is a repeat of the last set of init data
+ // received. Because streams may retransmit the same init data (or
+ // functionally equivalent init data) we do not want to perform costly
+ // operations each time we receive init data, only when it's actually
+ // different data.
+ bool IsRepeatInitData(const MediaInfo& aNewMediaInfo) const
+ MOZ_REQUIRES(mTaskQueueCapability);
+
+ // Temporary input buffer to handle partial media segment header.
+ // We store the current input buffer content into it should we need to
+ // reinitialize the demuxer once we have some samples and a discontinuity is
+ // detected.
+ Maybe<MediaSpan> mPendingInputBuffer MOZ_GUARDED_BY(mTaskQueueCapability);
+ RefPtr<SourceBufferResource> mCurrentInputBuffer
+ MOZ_GUARDED_BY(mTaskQueueCapability);
+ RefPtr<MediaDataDemuxer> mInputDemuxer MOZ_GUARDED_BY(mTaskQueueCapability);
+ // Length already processed in current media segment.
+ uint64_t mProcessedInput MOZ_GUARDED_BY(mTaskQueueCapability);
+ Maybe<media::TimeUnit> mLastParsedEndTime
+ MOZ_GUARDED_BY(mTaskQueueCapability);
+
+ void OnDemuxerInitDone(const MediaResult& aResult);
+ void OnDemuxerInitFailed(const MediaResult& aFailure);
+ void OnDemuxerResetDone(const MediaResult& aResult)
+ MOZ_REQUIRES(mTaskQueueCapability);
+ MozPromiseRequestHolder<MediaDataDemuxer::InitPromise> mDemuxerInitRequest;
+
+ void OnDemuxFailed(TrackType aTrack, const MediaResult& aError)
+ MOZ_REQUIRES(mTaskQueueCapability);
+ void DoDemuxVideo() MOZ_REQUIRES(mTaskQueueCapability);
+ void OnVideoDemuxCompleted(RefPtr<MediaTrackDemuxer::SamplesHolder> aSamples);
+ void OnVideoDemuxFailed(const MediaResult& aError) {
+ mVideoTracks.mDemuxRequest.Complete();
+ mTaskQueueCapability->AssertOnCurrentThread();
+ OnDemuxFailed(TrackType::kVideoTrack, aError);
+ }
+ void DoDemuxAudio() MOZ_REQUIRES(mTaskQueueCapability);
+ void OnAudioDemuxCompleted(RefPtr<MediaTrackDemuxer::SamplesHolder> aSamples);
+ void OnAudioDemuxFailed(const MediaResult& aError) {
+ mAudioTracks.mDemuxRequest.Complete();
+ mTaskQueueCapability->AssertOnCurrentThread();
+ OnDemuxFailed(TrackType::kAudioTrack, aError);
+ }
+
+ // Dispatches an "encrypted" event is any sample in array has initData
+ // present.
+ void MaybeDispatchEncryptedEvent(
+ const nsTArray<RefPtr<MediaRawData>>& aSamples);
+
+ void DoEvictData(const media::TimeUnit& aPlaybackTime, int64_t aSizeToEvict)
+ MOZ_REQUIRES(mTaskQueueCapability);
+
+ void GetDebugInfo(dom::TrackBuffersManagerDebugInfo& aInfo) const
+ MOZ_REQUIRES(mTaskQueueCapability);
+
+  // Per-track buffer state for a single track type (audio or video),
+  // implementing the track buffer variables defined by the MSE spec.
+  struct TrackData {
+    TrackData() : mNumTracks(0), mNeedRandomAccessPoint(true), mSizeBuffer(0) {}
+    Atomic<uint32_t> mNumTracks;
+    // Definition of variables:
+    // https://w3c.github.io/media-source/#track-buffers
+    // Last decode timestamp variable that stores the decode timestamp of the
+    // last coded frame appended in the current coded frame group.
+    // The variable is initially unset to indicate that no coded frames have
+    // been appended yet.
+    Maybe<media::TimeUnit> mLastDecodeTimestamp;
+    // Last frame duration variable that stores the coded frame duration of the
+    // last coded frame appended in the current coded frame group.
+    // The variable is initially unset to indicate that no coded frames have
+    // been appended yet.
+    Maybe<media::TimeUnit> mLastFrameDuration;
+    // Highest end timestamp variable that stores the highest coded frame end
+    // timestamp across all coded frames in the current coded frame group that
+    // were appended to this track buffer.
+    // The variable is initially unset to indicate that no coded frames have
+    // been appended yet.
+    Maybe<media::TimeUnit> mHighestEndTimestamp;
+    // Highest presentation timestamp in track buffer.
+    // Protected by global monitor, except when reading on the task queue as it
+    // is only written there.
+    media::TimeUnit mHighestStartTimestamp;
+    // Longest frame duration seen since last random access point.
+    // Only ever accessed when mLastDecodeTimestamp and mLastFrameDuration are
+    // set.
+    media::TimeUnit mLongestFrameDuration;
+    // Need random access point flag variable that keeps track of whether the
+    // track buffer is waiting for a random access point coded frame.
+    // The variable is initially set to true to indicate that random access
+    // point coded frame is needed before anything can be added to the track
+    // buffer.
+    bool mNeedRandomAccessPoint;
+    RefPtr<MediaTrackDemuxer> mDemuxer;
+    MozPromiseRequestHolder<MediaTrackDemuxer::SamplesPromise> mDemuxRequest;
+    // Highest end timestamp of the last media segment demuxed.
+    media::TimeUnit mLastParsedEndTime;
+
+    // If set, position where the next contiguous frame will be inserted.
+    // If a discontinuity is detected, it will be unset and recalculated upon
+    // the next insertion.
+    Maybe<uint32_t> mNextInsertionIndex;
+    // Samples just demuxed, but not yet parsed.
+    TrackBuffer mQueuedSamples;
+    // Returns the current (most recently created) track buffer; asserts that
+    // one exists.
+    const TrackBuffer& GetTrackBuffer() const {
+      MOZ_RELEASE_ASSERT(mBuffers.Length(),
+                         "TrackBuffer must have been created");
+      return mBuffers.LastElement();
+    }
+    TrackBuffer& GetTrackBuffer() {
+      MOZ_RELEASE_ASSERT(mBuffers.Length(),
+                         "TrackBuffer must have been created");
+      return mBuffers.LastElement();
+    }
+    // We only manage a single track of each type at this time.
+    nsTArray<TrackBuffer> mBuffers;
+    // Track buffer ranges variable that represents the presentation time
+    // ranges occupied by the coded frames currently stored in the track
+    // buffer.
+    media::TimeIntervals mBufferedRanges;
+    // Sanitized mBufferedRanges with a fuzz of half a sample's duration
+    // applied. These buffered ranges are the basis of what is exposed to the
+    // JS.
+    media::TimeIntervals mSanitizedBufferedRanges;
+    // Byte size of all samples contained in this track buffer.
+    uint32_t mSizeBuffer;
+    // TrackInfo of the first metadata received.
+    RefPtr<TrackInfoSharedPtr> mInfo;
+    // TrackInfo of the last metadata parsed (updated with each init segment).
+    RefPtr<TrackInfoSharedPtr> mLastInfo;
+
+    // If set, position of the next sample to be retrieved by GetSample().
+    // If the position is equal to the TrackBuffer's length, it indicates that
+    // we've reached EOS.
+    Maybe<uint32_t> mNextGetSampleIndex;
+    // Approximation of the next sample's decode timestamp.
+    media::TimeUnit mNextSampleTimecode;
+    // Approximation of the next sample's presentation timestamp.
+    media::TimeUnit mNextSampleTime;
+
+    // Bookkeeping for incremental eviction accounting (see mEvictionIndex).
+    struct EvictionIndex {
+      EvictionIndex() { Reset(); }
+      void Reset() {
+        mEvictable = 0;
+        mLastIndex = 0;
+      }
+      // Size of data evictable up to mLastIndex.
+      uint32_t mEvictable;
+      // Last sample index accounted for.
+      uint32_t mLastIndex;
+    };
+    // Size of data that can be safely evicted during the next eviction
+    // cycle.
+    // We consider as evictable all frames up to the last keyframe prior to
+    // mNextGetSampleIndex. If mNextGetSampleIndex isn't set, then we assume
+    // that we can't yet evict data.
+    // Protected by global monitor, except when reading on the task queue as it
+    // is only written there.
+    EvictionIndex mEvictionIndex;
+
+    // Clears the append-state variables: the next appended frame must be a
+    // random access point, and the insertion position will be recalculated.
+    void ResetAppendState() {
+      mLastDecodeTimestamp.reset();
+      mLastFrameDuration.reset();
+      mHighestEndTimestamp.reset();
+      mNeedRandomAccessPoint = true;
+      mNextInsertionIndex.reset();
+    }
+
+    // Fully resets the track: append state, eviction accounting, buffered
+    // samples, byte size, and buffered ranges.
+    void Reset() {
+      ResetAppendState();
+      mEvictionIndex.Reset();
+      for (auto& buffer : mBuffers) {
+        buffer.Clear();
+      }
+      mSizeBuffer = 0;
+      mNextGetSampleIndex.reset();
+      mBufferedRanges.Clear();
+      mSanitizedBufferedRanges.Clear();
+    }
+
+    void AddSizeOfResources(MediaSourceDecoder::ResourceSizes* aSizes) const;
+  };
+
+ void CheckSequenceDiscontinuity(const media::TimeUnit& aPresentationTime)
+ MOZ_REQUIRES(mTaskQueueCapability);
+ void ProcessFrames(TrackBuffer& aSamples, TrackData& aTrackData)
+ MOZ_REQUIRES(mTaskQueueCapability);
+ media::TimeInterval PresentationInterval(const TrackBuffer& aSamples) const
+ MOZ_REQUIRES(mTaskQueueCapability);
+ bool CheckNextInsertionIndex(TrackData& aTrackData,
+ const media::TimeUnit& aSampleTime)
+ MOZ_REQUIRES(mTaskQueueCapability);
+ void InsertFrames(TrackBuffer& aSamples,
+ const media::TimeIntervals& aIntervals,
+ TrackData& aTrackData) MOZ_REQUIRES(mTaskQueueCapability);
+ void UpdateHighestTimestamp(TrackData& aTrackData,
+ const media::TimeUnit& aHighestTime)
+ MOZ_REQUIRES(mTaskQueueCapability);
+ // Remove all frames and their dependencies contained in aIntervals.
+ // Return the index at which frames were first removed or 0 if no frames
+ // removed.
+ enum class RemovalMode {
+ kRemoveFrame,
+ kTruncateFrame,
+ };
+ uint32_t RemoveFrames(const media::TimeIntervals& aIntervals,
+ TrackData& aTrackData, uint32_t aStartIndex,
+ RemovalMode aMode);
+ // Recalculate track's evictable amount.
+ void ResetEvictionIndex(TrackData& aTrackData);
+ void UpdateEvictionIndex(TrackData& aTrackData, uint32_t aCurrentIndex);
+ // Find index of sample. Return a negative value if not found.
+ uint32_t FindSampleIndex(const TrackBuffer& aTrackBuffer,
+ const media::TimeInterval& aInterval);
+ const MediaRawData* GetSample(TrackInfo::TrackType aTrack, uint32_t aIndex,
+ const media::TimeUnit& aExpectedDts,
+ const media::TimeUnit& aExpectedPts,
+ const media::TimeUnit& aFuzz);
+ void UpdateBufferedRanges();
+ void RejectProcessing(const MediaResult& aRejectValue, const char* aName);
+ void ResolveProcessing(bool aResolveValue, const char* aName);
+ MozPromiseRequestHolder<CodedFrameProcessingPromise> mProcessingRequest;
+ MozPromiseHolder<CodedFrameProcessingPromise> mProcessingPromise;
+
+ // Trackbuffers definition.
+ nsTArray<const TrackData*> GetTracksList() const;
+ nsTArray<TrackData*> GetTracksList();
+  // Returns the per-track state for aTrack. Any track type other than video
+  // (including kAudioTrack) maps to the audio track data.
+  TrackData& GetTracksData(TrackType aTrack) {
+    switch (aTrack) {
+      case TrackType::kVideoTrack:
+        return mVideoTracks;
+      case TrackType::kAudioTrack:
+      default:
+        return mAudioTracks;
+    }
+  }
+  // Const overload of GetTracksData(); same mapping as above.
+  const TrackData& GetTracksData(TrackType aTrack) const {
+    switch (aTrack) {
+      case TrackType::kVideoTrack:
+        return mVideoTracks;
+      case TrackType::kAudioTrack:
+      default:
+        return mAudioTracks;
+    }
+  }
+ TrackData mVideoTracks;
+ TrackData mAudioTracks;
+
+ // TaskQueue methods and objects.
+  // Thread-safe accessor for the task queue; takes mMutex so it may be called
+  // from any thread. May return null after ResetTaskQueue() has run.
+  RefPtr<TaskQueue> GetTaskQueueSafe() const {
+    MutexAutoLock mut(mMutex);
+    return mTaskQueue;
+  }
+  // Unlocked accessor, only valid when already running on the task queue
+  // (asserted in DEBUG builds). mTaskQueue is only ever written on the task
+  // queue after construction, so no lock is needed on this path.
+  NotNull<AbstractThread*> TaskQueueFromTaskQueue() const {
+#ifdef DEBUG
+    RefPtr<TaskQueue> taskQueue = GetTaskQueueSafe();
+    MOZ_ASSERT(taskQueue && taskQueue->IsCurrentThreadIn());
+#endif
+    return WrapNotNull(mTaskQueue.get());
+  }
+  // True if the current thread is the manager's task queue.
+  bool OnTaskQueue() const {
+    auto taskQueue = TaskQueueFromTaskQueue();
+    return taskQueue->IsCurrentThreadIn();
+  }
+  // Drops the task queue reference under the lock; subsequent
+  // GetTaskQueueSafe() calls will return null.
+  void ResetTaskQueue() {
+    MutexAutoLock mut(mMutex);
+    mTaskQueue = nullptr;
+  }
+
+ // SourceBuffer Queues and running context.
+ SourceBufferTaskQueue mQueue;
+ void QueueTask(SourceBufferTask* aTask);
+ void ProcessTasks();
+  // Set if the TrackBuffersManager is currently processing a task.
+  // At this stage, this task is always an AppendBufferTask.
+ RefPtr<SourceBufferTask> mCurrentTask MOZ_GUARDED_BY(mTaskQueueCapability);
+ // Current SourceBuffer state for ongoing task.
+ // Its content is returned to the SourceBuffer once the AppendBufferTask has
+ // completed.
+ UniquePtr<SourceBufferAttributes> mSourceBufferAttributes
+ MOZ_GUARDED_BY(mTaskQueueCapability);
+  // The current sourcebuffer append window. Its content is equivalent to
+  // mSourceBufferAttributes.mAppendWindowStart/End
+ media::TimeInterval mAppendWindow MOZ_GUARDED_BY(mTaskQueueCapability);
+
+ // Strong references to external objects.
+ nsMainThreadPtrHandle<MediaSourceDecoder> mParentDecoder;
+
+ const RefPtr<AbstractThread> mAbstractMainThread;
+
+ // Return public highest end time across all aTracks.
+ // Monitor must be held.
+ media::TimeUnit HighestEndTime(
+ nsTArray<const media::TimeIntervals*>& aTracks) const;
+
+ // Set to true if mediasource state changed to ended.
+ Atomic<bool> mEnded;
+
+ // Global size of this source buffer content.
+ Atomic<int64_t> mSizeSourceBuffer;
+ const int64_t mVideoEvictionThreshold;
+ const int64_t mAudioEvictionThreshold;
+ enum class EvictionState {
+ NO_EVICTION_NEEDED,
+ EVICTION_NEEDED,
+ EVICTION_COMPLETED,
+ };
+ Atomic<EvictionState> mEvictionState;
+
+ // Monitor to protect following objects accessed across multiple threads.
+ mutable Mutex mMutex MOZ_UNANNOTATED;
+ // mTaskQueue is only ever written after construction on the task queue.
+ // As such, it can be accessed while on task queue without the need for the
+ // mutex.
+ RefPtr<TaskQueue> mTaskQueue;
+ // Stable audio and video track time ranges.
+ media::TimeIntervals mVideoBufferedRanges;
+ media::TimeIntervals mAudioBufferedRanges;
+ // MediaInfo of the first init segment read.
+ MediaInfo mInfo;
+ // End mutex protected members.
+
+ // EventTargetCapability used to ensure we're running on the task queue
+ // as expected for various accesses.
+ // TODO: we could store only this and dispatch to it, rather than also having
+ // mTaskQueue. However, there's special locking around mTaskQueue, so we keep
+ // both for now.
+ Maybe<EventTargetCapability<TaskQueue>> mTaskQueueCapability;
+};
+
+} // namespace mozilla
+
+#endif /* MOZILLA_TRACKBUFFERSMANAGER_H_ */
diff --git a/dom/media/mediasource/gtest/TestContainerParser.cpp b/dom/media/mediasource/gtest/TestContainerParser.cpp
new file mode 100644
index 0000000000..dd5a87b64b
--- /dev/null
+++ b/dom/media/mediasource/gtest/TestContainerParser.cpp
@@ -0,0 +1,148 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <gtest/gtest.h>
+#include <stdint.h>
+
+#include "ContainerParser.h"
+#include "mozilla/ArrayUtils.h"
+#include "mozilla/gtest/MozAssertions.h"
+
+using namespace mozilla;
+using TimeUnit = mozilla::media::TimeUnit;
+
+// Verify a ContainerParser can be created for every supported MIME type.
+TEST(ContainerParser, MIMETypes)
+{
+  const char* const kContainerTypes[] = {"video/webm", "audio/webm",
+                                         "video/mp4", "audio/mp4",
+                                         "audio/aac"};
+  for (const char* mimeType : kContainerTypes) {
+    Maybe<MediaContainerType> type = MakeMediaContainerType(mimeType);
+    ASSERT_TRUE(type.isSome());
+    UniquePtr<ContainerParser> parser =
+        ContainerParser::CreateForMIMEType(*type);
+    ASSERT_NE(parser, nullptr);
+  }
+}
+
+// Returns a buffer holding a minimal 7-byte ADTS frame header (syncword
+// 0xFFF, layer 0, no CRC), used as a fixture by the ADTS parser tests below.
+already_AddRefed<MediaByteBuffer> make_adts_header() {
+  const uint8_t test[] = {0xff, 0xf1, 0x50, 0x80, 0x03, 0x1f, 0xfc};
+  RefPtr<MediaByteBuffer> buffer(new MediaByteBuffer);
+  buffer->AppendElements(test, ArrayLength(test));
+  return buffer.forget();
+}
+
+// Exercises the ADTS ContainerParser: acceptance of a valid header, rejection
+// of malformed variants, and the reported init/media segment byte ranges.
+TEST(ContainerParser, ADTSHeader)
+{
+  UniquePtr<ContainerParser> parser;
+  parser = ContainerParser::CreateForMIMEType(
+      MediaContainerType(MEDIAMIMETYPE("audio/aac")));
+  ASSERT_NE(parser, nullptr);
+
+  // Audio data should have no gaps.
+  EXPECT_EQ(parser->GetRoundingError(), 0);
+
+  // Test a valid header.
+  RefPtr<MediaByteBuffer> header = make_adts_header();
+  EXPECT_NS_SUCCEEDED(parser->IsInitSegmentPresent(MediaSpan(header)));
+
+  // Test variations.
+  uint8_t save = header->ElementAt(1);
+  for (uint8_t i = 1; i < 3; ++i) {
+    // Set non-zero layer.
+    header->ReplaceElementAt(1, (header->ElementAt(1) & 0xf9) | (i << 1));
+    EXPECT_FALSE(NS_SUCCEEDED(parser->IsInitSegmentPresent(MediaSpan(header))))
+        << "Accepted non-zero layer in header.";
+  }
+  header->ReplaceElementAt(1, save);
+  save = header->ElementAt(2);
+  header->ReplaceElementAt(2, (header->ElementAt(2) & 0x3b) | (15 << 2));
+  EXPECT_FALSE(NS_SUCCEEDED(parser->IsInitSegmentPresent(MediaSpan(header))))
+      << "Accepted explicit frequency in header.";
+  header->ReplaceElementAt(2, save);
+
+  // Test a short header.
+  header->SetLength(6);
+  EXPECT_FALSE(NS_SUCCEEDED(parser->IsInitSegmentPresent(MediaSpan(header))))
+      << "Accepted too-short header.";
+  EXPECT_FALSE(NS_SUCCEEDED(parser->IsMediaSegmentPresent(MediaSpan(header))))
+      << "Found media segment when there was just a partial header.";
+
+  // Test a header with short data.
+  header = make_adts_header();
+  header->AppendElements(1);
+  EXPECT_TRUE(NS_SUCCEEDED(parser->IsInitSegmentPresent(MediaSpan(header))))
+      << "Rejected a valid header.";
+  EXPECT_TRUE(NS_SUCCEEDED(parser->IsMediaSegmentPresent(MediaSpan(header))))
+      << "Rejected a one-byte media segment.";
+
+  // Test parse results.
+  header = make_adts_header();
+  EXPECT_FALSE(NS_SUCCEEDED(parser->IsMediaSegmentPresent(MediaSpan(header))))
+      << "Found media segment when there was just a header.";
+  TimeUnit start;
+  TimeUnit end;
+  // We don't report timestamps from ADTS, so this is expected to fail.
+  EXPECT_TRUE(NS_FAILED(
+      parser->ParseStartAndEndTimestamps(MediaSpan(header), start, end)));
+
+  EXPECT_TRUE(parser->HasInitData());
+  EXPECT_TRUE(parser->HasCompleteInitData());
+  MediaByteBuffer* init = parser->InitData();
+  ASSERT_NE(init, nullptr);
+  EXPECT_EQ(init->Length(), header->Length());
+
+  EXPECT_EQ(parser->InitSegmentRange(),
+            MediaByteRange(0, int64_t(header->Length())));
+  // Media segment range should be empty here.
+  EXPECT_EQ(parser->MediaHeaderRange(), MediaByteRange());
+  EXPECT_EQ(parser->MediaSegmentRange(), MediaByteRange());
+}
+
+// Feeds the ADTS parser a full (zero-filled) frame and verifies the computed
+// init segment and media segment byte boundaries.
+TEST(ContainerParser, ADTSBlankMedia)
+{
+  UniquePtr<ContainerParser> parser;
+  parser = ContainerParser::CreateForMIMEType(
+      MediaContainerType(MEDIAMIMETYPE("audio/aac")));
+  ASSERT_NE(parser, nullptr);
+
+  // Audio data should have no gaps.
+  EXPECT_EQ(parser->GetRoundingError(), 0);
+
+  // Test the header only.
+  RefPtr<MediaByteBuffer> header = make_adts_header();
+  EXPECT_NS_SUCCEEDED(parser->IsInitSegmentPresent(MediaSpan(header)));
+
+  // Test with the correct length of (invalid) frame data.
+  size_t header_length = header->Length();
+  size_t data_length = 24;
+  size_t frame_length = header_length + data_length;
+  header->AppendElements(data_length);
+  EXPECT_TRUE(NS_SUCCEEDED(parser->IsInitSegmentPresent(MediaSpan(header))))
+      << "Rejected a valid header.";
+  EXPECT_TRUE(NS_SUCCEEDED(parser->IsMediaSegmentPresent(MediaSpan(header))))
+      << "Rejected a full (but zeroed) media segment.";
+  TimeUnit start;
+  TimeUnit end;
+  // We don't report timestamps from ADTS.
+  EXPECT_TRUE(NS_FAILED(
+      parser->ParseStartAndEndTimestamps(MediaSpan(header), start, end)));
+  EXPECT_TRUE(start.IsZero());
+  EXPECT_TRUE(end.IsZero());
+
+  // Verify the parser calculated header and packet data boundaries.
+  EXPECT_TRUE(parser->HasInitData());
+  EXPECT_TRUE(parser->HasCompleteInitData());
+  MediaByteBuffer* init = parser->InitData();
+  ASSERT_NE(init, nullptr);
+  EXPECT_EQ(init->Length(), header_length)
+      << "Found incorrect init segment length.";
+  EXPECT_EQ(parser->InitSegmentRange(),
+            MediaByteRange(0, int64_t(header_length)));
+  // In ADTS the Media Header is the same as the Media Segment.
+  MediaByteRange expected_media =
+      MediaByteRange(int64_t(header_length), int64_t(frame_length));
+  EXPECT_EQ(parser->MediaHeaderRange(), expected_media);
+  EXPECT_EQ(parser->MediaSegmentRange(), expected_media);
+}
diff --git a/dom/media/mediasource/gtest/TestExtractAV1CodecDetails.cpp b/dom/media/mediasource/gtest/TestExtractAV1CodecDetails.cpp
new file mode 100644
index 0000000000..8683023204
--- /dev/null
+++ b/dom/media/mediasource/gtest/TestExtractAV1CodecDetails.cpp
@@ -0,0 +1,290 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <gtest/gtest.h>
+#include <stdint.h>
+
+#include "AOMDecoder.h"
+#include "VideoUtils.h"
+
+namespace mozilla {
+// gtest value printer for AV1SequenceInfo: renders the info as an "av01.*"
+// codecs-style string followed by the resolution and the list of operating
+// points, so failing EXPECT_EQ comparisons are readable.
+void PrintTo(const AOMDecoder::AV1SequenceInfo& aInfo, std::ostream* aStream) {
+  nsAutoCString formatted = nsAutoCString();
+  formatted.AppendPrintf(
+      "av01.%01u.%02u%c.%02u.%01u.%01u%01u%01u.%02u.%02u.%02u.%01u (res: "
+      "%ux%u) operating points: [",
+      aInfo.mProfile, aInfo.mOperatingPoints[0].mLevel,
+      aInfo.mOperatingPoints[0].mTier == 1 ? 'H' : 'M', aInfo.mBitDepth,
+      aInfo.mMonochrome, aInfo.mSubsamplingX, aInfo.mSubsamplingY,
+      static_cast<uint8_t>(aInfo.mChromaSamplePosition),
+      static_cast<uint8_t>(aInfo.mColorSpace.mPrimaries),
+      static_cast<uint8_t>(aInfo.mColorSpace.mTransfer),
+      static_cast<uint8_t>(aInfo.mColorSpace.mMatrix),
+      static_cast<uint8_t>(aInfo.mColorSpace.mRange), aInfo.mImage.Width(),
+      aInfo.mImage.Height());
+  size_t opCount = aInfo.mOperatingPoints.Length();
+  for (size_t i = 0; i < opCount; i++) {
+    const auto& op = aInfo.mOperatingPoints[i];
+    formatted.AppendPrintf("{ layers: %x, level: %u, tier: %u }", op.mLayers,
+                           op.mLevel, op.mTier);
+    // Comma-separate all entries except the last.
+    if (i != opCount - 1) {
+      formatted.Append(", ");
+    }
+  }
+  formatted.Append("]");
+  *aStream << formatted;
+}
+}  // namespace mozilla
+
+using namespace mozilla;
+
+// One test case for AV1 codec parameter string parsing.
+struct AV1TestData {
+  // The "av01.*" codecs= parameter string under test.
+  const char* mCodecParameterString;
+  // Whether parsing is expected to succeed.
+  const bool mExpectedValue;
+  // Human-readable description shown on failure.
+  const char* mComment;
+};
+
+// Table-driven test: parses each codecs string, checks success/failure
+// against the expectation, and round-trips accepted results through the av1C
+// box writer/reader.
+TEST(ExtractAV1CodecDetails, TestInputData)
+{
+  AV1TestData tests[] = {
+      // Format is:
+      // av01.N.NN[MH].NN.B.BBN.NN.NN.NN.B
+      // where
+      // N = decimal digit
+      // [] = single character
+      // B = binary digit
+      // Field order:
+      // <sample entry 4CC>.<profile>.<level><tier>.<bitDepth>
+      // [.<monochrome>.<chromaSubsampling>
+      // .<colorPrimaries>.<transferCharacteristics>.<matrixCoefficients>
+      // .<videoFullRangeFlag>]
+
+      // Format checks
+      {"av01.0.10M.08", true, "Minimum length"},
+      {"av1.0.10M.08", false, "Invalid 4CC"},
+      {"av01..10M.08", false, "Blank field"},
+      {"av01.-1.10M.08", false, "Negative field"},
+      {"av01.0.10M.8", false, "Missing leading zeros"},
+
+      // Field counts
+      {"av01", false, "0 of 4 required fields"},
+      {"av01.0", false, "1 of 4 required fields"},
+      {"av01.0.10", false, "2 of 4 required fields"},
+      {"av01.0.10M", false, "3 of 4 required fields"},
+      {"av01.0.10M.08.0", false, "5 fields, AV1 requires 4 or 10"},
+      {"av01.0.10M.08.0.110.01.01.01", false, "9 fields, AV1 requires 4 or 10"},
+      {"av01.0.10M.08.0.110.01.01.01.0", true, "Maximum fields"},
+      {"av01.0.10M.08.0.110.01.01.01.0.0", false, "Too many fields"},
+
+      // "Comments" are allowed (unknown characters at the end of fields)
+      {"av01.0.10M.08this is ignored", true, "Minimum length with comment"},
+      {"av01.0.10Mbad comment", false, "Comment before required field"},
+      {"av01.0.10M.08.0.110.01.01.01.0also ignored", true,
+       "Maximum length with comment"},
+
+      // Begin field checks
+
+      // -- Profile --
+      // Main Profile (0) tested above
+
+      // High Profile requires 4:4:4 chroma subsampling without monochrome
+      {"av01.1.10M.08", false, "High Profile (1) without parameters"},
+      {"av01.1.10M.08.0.000.01.01.01.0", true, "High Profile (1)"},
+
+      // Professional requires either of:
+      // - 8bit or 10bit at 4:2:2
+      // - 12bit at any subsampling
+      {"av01.2.10M.10.0.100.01.01.01.0", true,
+       "Professional Profile (2) 10-bit 4:2:2"},
+      {"av01.2.10M.12.0.110.01.01.01.0", true,
+       "Professional Profile (2) 12-bit 4:2:0"},
+
+      {"av01.3.10M.12.0.000.01.01.01.0", false, "Invalid Profile 3"},
+
+      // -- Level --
+      {"av01.0.00M.08", true, "Level 0 (2.1)"},
+      // Level 4.2 (10) tested above
+      {"av01.0.14M.08", true, "Level 14 (5.2)"},
+      {"av01.0.23M.08", true, "Level 23 (7.3)"},
+      {"av01.0.24M.08", false, "Level 24 (Reserved)"},
+
+      // -- Tier --
+      // Main tier tested above
+      {"av01.0.10H.08", true, "High tier"},
+
+      // -- Bit depth --
+      // 8-bit tested above with Main and High Profiles
+      {"av01.0.10M.10", true, "Main 10-bit"},
+      {"av01.1.10M.10.0.000.01.01.01.0", true, "High 10-bit"},
+      {"av01.1.10M.12.0.000.01.01.01.0", false, "High 12-bit (Invalid)"},
+      // Valid 12-bit tested for Professional Profile
+
+      // -- Monochrome --
+      // Monochrome off tested above
+      {"av01.0.10M.08.1.110.01.01.01.0", true, "Main 8-bit monochrome"},
+      {"av01.1.10M.10.1.000.01.01.01.0", false,
+       "4:4:4 is incompatible with monochrome"},
+      {"av01.2.10M.10.1.100.01.01.01.0", false,
+       "4:2:0 is incompatible with monochrome"},
+      {"av01.2.10M.12.1.110.01.01.01.0", true,
+       "Professional 12-bit monochrome"},
+
+      // -- Chroma subsampling --
+      // Field is parsed by digits <x><y><position>
+      // where positions are [unknown, vertical, colocated]
+      {"av01.0.10M.08.0.112.01.01.01.0", true, "Chroma colocated"},
+      // Main Profile, 4:2:0 tested above
+      {"av01.0.10M.08.0.100.01.01.01.0", false,
+       "4:2:2 not allowed on Main Profile"},
+      // High Profile, 4:4:4 tested above
+      {"av01.1.10M.08.0.110.01.01.01.0", false,
+       "4:4:4 required on High Profile"},
+      {"av01.2.10M.08.0.110.01.01.01.0", false,
+       "4:2:0 not allowed on 8-bit Professional"},
+      // Professional Profile, 8-bit 4:2:2 tested above
+      // Professional Profile, 12-bit 4:2:0 tested above
+      {"av01.2.10M.12.0.100.01.01.01.0", true, "12-bit 4:2:2"},
+      {"av01.2.10M.12.0.000.01.01.01.0", true, "12-bit 4:4:4"},
+
+      {"av01.2.10M.08.0.101.01.01.01.0", false, "Chroma position with 4:2:2"},
+      {"av01.1.10M.08.0.001.01.01.01.0", false, "Chroma position with 4:4:4"},
+      {"av01.0.10M.08.0.113.01.01.01.0", false, "Chroma position 3 (Reserved)"},
+
+      // -- Color primaries --
+      // 0, 3, [13-21], >23 are reserved
+      // 1 (BT709) is tested above
+      {"av01.0.10M.10.0.110.09.16.09.0", true,
+       "Color space: BT2020/SMPTE2084/BT2020NCL"},
+      {"av01.0.10M.10.0.110.00.16.09.0", false, "Primaries 0: Reserved"},
+      {"av01.0.10M.10.0.110.03.16.09.0", false, "Primaries 3: Reserved"},
+      {"av01.0.10M.10.0.110.13.16.09.0", false, "Primaries 13: Reserved"},
+      {"av01.0.10M.10.0.110.21.16.09.0", false, "Primaries 21: Reserved"},
+      {"av01.0.10M.10.0.110.22.16.09.0", true, "Primaries 22: EBU3213"},
+      {"av01.0.10M.10.0.110.23.16.09.0", false, "Primaries 23: Reserved"},
+
+      // -- Transfer characteristics --
+      // 0, 3, >19 are all reserved
+      // 1 (BT709) is tested above
+      // 16 (SMPTE2084) is tested above
+      {"av01.0.10M.10.0.110.09.14.09.0", true,
+       "Color space: BT2020/BT2020 10-bit/BT2020NCL"},
+      {"av01.0.10M.10.0.110.09.00.09.0", false, "Transfer 0: Reserved"},
+      {"av01.0.10M.10.0.110.09.03.09.0", false, "Transfer 3: Reserved"},
+      {"av01.0.10M.10.0.110.09.20.09.0", false, "Transfer 20: Reserved"},
+
+      // -- Matrix coefficients --
+      // 3, >15 are all reserved
+      // 1 (BT709) is tested above
+      // 9 (BT2020NCL) is tested above
+      {"av01.1.10M.10.0.000.01.13.00.1", true, "4:4:4 10-bit sRGB"},
+      {"av01.1.10M.10.0.000.01.13.00.0", false, "sRGB requires full range"},
+      {"av01.2.10M.10.0.100.01.13.00.1", false,
+       "Subsampling incompatible with sRGB"},
+      {"av01.2.10M.12.0.000.01.13.00.1", true, "4:4:4 12-bit sRGB"},
+      {"av01.2.10M.12.0.000.01.01.15.1", false, "Matrix 15: Reserved"},
+
+      // -- Color range --
+      // Full range and limited range tested above
+      {"av01.0.10M.12.0.002.01.13.00.2", false, "Color range 2 invalid"},
+  };
+
+  for (const auto& data : tests) {
+    auto info = AOMDecoder::CreateSequenceInfoFromCodecs(
+        NS_ConvertUTF8toUTF16(data.mCodecParameterString));
+    // Build "codecs-string (comment)" for failure messages.
+    nsAutoCString desc = nsAutoCString(data.mCodecParameterString,
+                                       strlen(data.mCodecParameterString));
+    desc.AppendLiteral(" (");
+    desc.Append(data.mComment, strlen(data.mComment));
+    desc.AppendLiteral(")");
+    EXPECT_EQ(info.isSome(), data.mExpectedValue) << desc;
+
+    // Round-trip accepted results through the av1C box writer/reader and
+    // check they survive unchanged.
+    if (info.isSome()) {
+      AOMDecoder::AV1SequenceInfo inputInfo = info.value();
+      inputInfo.mImage = gfx::IntSize(1920, 1080);
+      RefPtr<MediaByteBuffer> buffer = new MediaByteBuffer();
+      bool wroteSequenceHeader;
+      AOMDecoder::WriteAV1CBox(inputInfo, buffer, wroteSequenceHeader);
+      EXPECT_EQ(wroteSequenceHeader, data.mExpectedValue) << desc;
+      // Read equality test will fail also, don't clutter.
+      if (!wroteSequenceHeader) {
+        continue;
+      }
+      AOMDecoder::AV1SequenceInfo parsedInfo;
+      bool readSequenceHeader;
+      AOMDecoder::ReadAV1CBox(buffer, parsedInfo, readSequenceHeader);
+      EXPECT_EQ(wroteSequenceHeader, readSequenceHeader) << desc;
+      EXPECT_EQ(inputInfo, parsedInfo) << desc;
+    }
+  }
+}
+
+// Spot-checks that parsed field values land in the right members of
+// AV1SequenceInfo for three representative codecs strings.
+TEST(ExtractAV1CodecDetails, TestParsingOutput)
+{
+  // Main Profile, level 5.2 Main tier, 8-bit, 4:2:0 colocated, BT.709
+  // limited range.
+  auto info = AOMDecoder::CreateSequenceInfoFromCodecs(
+      nsString(u"av01.0.14M.08.0.112.01.01.01.0"));
+  EXPECT_TRUE(info.isSome());
+
+  if (info.isSome()) {
+    EXPECT_EQ(info->mProfile, 0u);
+    EXPECT_EQ(info->mOperatingPoints.Length(), 1u);
+    EXPECT_EQ(info->mOperatingPoints[0].mLayers, 0u);
+    EXPECT_EQ(info->mOperatingPoints[0].mLevel, 14u);
+    EXPECT_EQ(info->mOperatingPoints[0].mTier, 0u);
+    EXPECT_EQ(info->mBitDepth, 8u);
+    EXPECT_EQ(info->mMonochrome, false);
+    EXPECT_EQ(info->mSubsamplingX, true);
+    EXPECT_EQ(info->mSubsamplingY, true);
+    EXPECT_EQ(info->mChromaSamplePosition,
+              AOMDecoder::ChromaSamplePosition::Colocated);
+    EXPECT_EQ(info->mColorSpace.mPrimaries, gfx::CICP::CP_BT709);
+    EXPECT_EQ(info->mColorSpace.mTransfer, gfx::CICP::TC_BT709);
+    EXPECT_EQ(info->mColorSpace.mMatrix, gfx::CICP::MC_BT709);
+    EXPECT_EQ(info->mColorSpace.mRange, gfx::ColorRange::LIMITED);
+  }
+
+  // High Profile, High tier, 10-bit, 4:4:4, SMPTE240 full range.
+  info = AOMDecoder::CreateSequenceInfoFromCodecs(
+      nsString(u"av01.1.11H.10.0.000.07.07.07.1"));
+  EXPECT_TRUE(info.isSome());
+
+  if (info.isSome()) {
+    EXPECT_EQ(info->mProfile, 1u);
+    EXPECT_EQ(info->mOperatingPoints.Length(), 1u);
+    EXPECT_EQ(info->mOperatingPoints[0].mLayers, 0u);
+    EXPECT_EQ(info->mOperatingPoints[0].mLevel, 11u);
+    EXPECT_EQ(info->mOperatingPoints[0].mTier, 1u);
+    EXPECT_EQ(info->mBitDepth, 10u);
+    EXPECT_EQ(info->mMonochrome, false);
+    EXPECT_EQ(info->mSubsamplingX, false);
+    EXPECT_EQ(info->mSubsamplingY, false);
+    EXPECT_EQ(info->mChromaSamplePosition,
+              AOMDecoder::ChromaSamplePosition::Unknown);
+    EXPECT_EQ(info->mColorSpace.mPrimaries, gfx::CICP::CP_SMPTE240);
+    EXPECT_EQ(info->mColorSpace.mTransfer, gfx::CICP::TC_SMPTE240);
+    EXPECT_EQ(info->mColorSpace.mMatrix, gfx::CICP::MC_SMPTE240);
+    EXPECT_EQ(info->mColorSpace.mRange, gfx::ColorRange::FULL);
+  }
+
+  // Professional Profile, 12-bit monochrome 4:2:0.
+  info = AOMDecoder::CreateSequenceInfoFromCodecs(
+      nsString(u"av01.2.22H.12.1.110.10.08.04.1"));
+  EXPECT_TRUE(info.isSome());
+
+  if (info.isSome()) {
+    EXPECT_EQ(info->mProfile, 2u);
+    EXPECT_EQ(info->mOperatingPoints.Length(), 1u);
+    EXPECT_EQ(info->mOperatingPoints[0].mLayers, 0u);
+    EXPECT_EQ(info->mOperatingPoints[0].mLevel, 22u);
+    EXPECT_EQ(info->mOperatingPoints[0].mTier, 1u);
+    EXPECT_EQ(info->mBitDepth, 12u);
+    EXPECT_EQ(info->mMonochrome, true);
+    EXPECT_EQ(info->mSubsamplingX, true);
+    EXPECT_EQ(info->mSubsamplingY, true);
+    EXPECT_EQ(info->mChromaSamplePosition,
+              AOMDecoder::ChromaSamplePosition::Unknown);
+    EXPECT_EQ(info->mColorSpace.mPrimaries, gfx::CICP::CP_XYZ);
+    EXPECT_EQ(info->mColorSpace.mTransfer, gfx::CICP::TC_LINEAR);
+    EXPECT_EQ(info->mColorSpace.mMatrix, gfx::CICP::MC_FCC);
+    EXPECT_EQ(info->mColorSpace.mRange, gfx::ColorRange::FULL);
+  }
+}
diff --git a/dom/media/mediasource/gtest/TestExtractVPXCodecDetails.cpp b/dom/media/mediasource/gtest/TestExtractVPXCodecDetails.cpp
new file mode 100644
index 0000000000..7e255e2dc7
--- /dev/null
+++ b/dom/media/mediasource/gtest/TestExtractVPXCodecDetails.cpp
@@ -0,0 +1,141 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <gtest/gtest.h>
+#include <stdint.h>
+
+#include "VideoUtils.h"
+
+using namespace mozilla;
+
+// One test case for VP9 codec parameter string parsing.
+struct TestData {
+  // The "vp09.*" codecs= parameter string under test.
+  const char16_t* const mCodecParameterString;
+  // Whether ExtractVPXCodecDetails is expected to succeed.
+  const bool mExpectedValue;
+  // Human-readable description shown on failure.
+  const char* const mComment;
+};
+
+// Table-driven test: parses each vp09 codecs string and checks
+// success/failure against the expectation.
+TEST(ExtractVPXCodecDetails, TestInputData)
+{
+  TestData tests[] = {
+      // <sample entry 4CC>.<profile>.<level>.<bitDepth>.<chromaSubsampling>.
+      // <colourPrimaries>.<transferCharacteristics>.<matrixCoefficients>.
+      // <videoFullRangeFlag>
+
+      // Format checks
+      {u"vp09.0.10.8", true, "Valid minimum length"},
+      {u"vp9.00.10.08", false, "Invalid 4CC"},
+      {u"vp09.00..08", false, "Blank field"},
+      {u"vp09", false, "0 of 3 required fields"},
+      {u"vp09.00", false, "1 of 3 required fields"},
+      {u"vp09.00.10", false, "2 of 3 required fields"},
+
+      // Profiles
+      {u"vp09.00.10.08", true, "Profile 0"},
+      {u"vp09.01.10.08", true, "Profile 1"},
+      {u"vp09.02.10.10", true, "Profile 2"},
+      {u"vp09.03.10.10", true, "Profile 3"},
+      {u"vp09.-1.10.08", false, "Invalid profile < 0"},
+      {u"vp09.04.10.08", false, "Invalid profile > 3"},
+
+      // Levels
+      {u"vp09.00.11.08", true, "Level 1.1"},
+      {u"vp09.00.12.08", false, "Invalid level 1.2"},
+      {u"vp09.00.52.08", true, "Level 5.2"},
+      {u"vp09.00.64.08", false, "Level greater than max"},
+
+      // Bit depths
+      // - 8-bit tested in Profiles section
+      // - 10-bit tested in Profiles section
+      {u"vp09.02.10.12", true, "12-bit"},
+      {u"vp09.00.10.07", false, "Invalid, 7-bit"},
+      {u"vp09.02.10.11", false, "Invalid, 11-bit"},
+      {u"vp09.02.10.13", false, "Invalid, 13-bit"},
+
+      // Chroma subsampling
+      {u"vp09.00.10.08.00", true, "4:2:0 vertical"},
+      {u"vp09.00.10.08.01", true, "4:2:0 colocated"},
+      {u"vp09.00.10.08.02", true, "4:2:2"},
+      {u"vp09.00.10.08.03", true, "4:4:4"},
+      {u"vp09.00.10.08.04", false, "Invalid chroma"},
+
+      // Color primaries
+      {u"vp09.00.10.08.01.00", false, "CP 0: Reserved"},
+      {u"vp09.00.10.08.01.01", true, "CP 1: BT.709"},
+      {u"vp09.00.10.08.01.03", false, "CP 3: Reserved"},
+      {u"vp09.00.10.08.01.09", true, "CP 9: BT.2020"},
+      {u"vp09.00.10.08.01.21", false, "CP 21: Reserved"},
+      {u"vp09.00.10.08.01.22", true, "CP 22: EBU Tech 3213"},
+      {u"vp09.00.10.08.01.23", false, "CP 23: Out of range"},
+
+      // Transfer characteristics
+      {u"vp09.00.10.08.01.01.00", false, "TC 0: Reserved"},
+      {u"vp09.00.10.08.01.01.01", true, "TC 1: BT.709"},
+      {u"vp09.00.10.08.01.01.03", false, "TC 3: Reserved"},
+      {u"vp09.00.10.08.01.09.16", true, "TC 16: ST 2084"},
+      {u"vp09.00.10.08.01.09.19", false, "TC 19: Out of range"},
+
+      // Matrix coefficients
+      {u"vp09.00.10.08.03.09.16.00", true, "MC 0: Identity"},
+      {u"vp09.00.10.08.01.09.16.00", false, "MC 0: Identity without 4:4:4"},
+      {u"vp09.00.10.08.01.09.16.01", true, "MC 1: BT.709"},
+      {u"vp09.00.10.08.01.09.16.03", false, "MC 3: Reserved"},
+      {u"vp09.00.10.08.01.09.16.09", true, "MC 9: BT.2020"},
+      {u"vp09.00.10.08.01.09.16.15", false, "MC 15: Out of range"},
+
+      // Color range
+      {u"vp09.00.10.08.01.09.16.09.00", true, "Limited range"},
+      {u"vp09.00.10.08.01.09.16.09.01", true, "Full range"},
+      {u"vp09.00.10.08.01.09.16.09.02", false, "Invalid range value"},
+
+      {u"vp09.00.10.08.01.09.16.09.00.", false, "Extra ."},
+      {u"vp09.00.10.08.01.09.16.09.00.00", false, "More than 9 fields"},
+  };
+
+  for (const auto& data : tests) {
+    uint8_t profile = 0;
+    uint8_t level = 0;
+    uint8_t bitDepth = 0;
+    bool result = ExtractVPXCodecDetails(nsString(data.mCodecParameterString),
+                                         profile, level, bitDepth);
+    EXPECT_EQ(result, data.mExpectedValue)
+        << NS_ConvertUTF16toUTF8(data.mCodecParameterString).get() << " ("
+        << data.mComment << ")";
+  }
+}
+
+// Checks that parsed field values land in the right output parameters, both
+// for a minimal (4-field) string with spec defaults and a full 9-field one.
+TEST(ExtractVPXCodecDetails, TestParsingOutput)
+{
+  uint8_t profile = 0;
+  uint8_t level = 0;
+  uint8_t bitDepth = 0;
+  uint8_t chromaSubsampling = 0;
+  VideoColorSpace colorSpace;
+  // Minimal string: optional fields should take spec-defined defaults.
+  auto data = u"vp09.01.11.08";
+  bool result = ExtractVPXCodecDetails(nsString(data), profile, level, bitDepth,
+                                       chromaSubsampling, colorSpace);
+  EXPECT_EQ(result, true);
+  EXPECT_EQ(profile, 1);
+  EXPECT_EQ(level, 11);
+  EXPECT_EQ(bitDepth, 8);
+  // Should keep spec defined default value.
+  EXPECT_EQ(chromaSubsampling, 1);
+  EXPECT_EQ(colorSpace.mPrimaries, gfx::CICP::CP_BT709);
+  EXPECT_EQ(colorSpace.mTransfer, gfx::CICP::TC_BT709);
+  EXPECT_EQ(colorSpace.mMatrix, gfx::CICP::MC_BT709);
+  EXPECT_EQ(colorSpace.mRange, gfx::ColorRange::LIMITED);
+
+  // Full string: every field explicitly specified.
+  data = u"vp09.02.10.10.01.09.16.09.01";
+  result = ExtractVPXCodecDetails(nsString(data), profile, level, bitDepth,
+                                  chromaSubsampling, colorSpace);
+  EXPECT_EQ(result, true);
+  EXPECT_EQ(profile, 2);
+  EXPECT_EQ(level, 10);
+  EXPECT_EQ(bitDepth, 10);
+  EXPECT_EQ(chromaSubsampling, 1);
+  EXPECT_EQ(colorSpace.mPrimaries, gfx::CICP::CP_BT2020);
+  EXPECT_EQ(colorSpace.mTransfer, gfx::CICP::TC_SMPTE2084);
+  EXPECT_EQ(colorSpace.mMatrix, gfx::CICP::MC_BT2020_NCL);
+  EXPECT_EQ(colorSpace.mRange, gfx::ColorRange::FULL);
+}
diff --git a/dom/media/mediasource/gtest/moz.build b/dom/media/mediasource/gtest/moz.build
new file mode 100644
index 0000000000..42ef6beb9b
--- /dev/null
+++ b/dom/media/mediasource/gtest/moz.build
@@ -0,0 +1,22 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+UNIFIED_SOURCES += [
+ "TestContainerParser.cpp",
+ "TestExtractVPXCodecDetails.cpp",
+]
+
+if CONFIG["MOZ_AV1"]:
+ UNIFIED_SOURCES += [
+ "TestExtractAV1CodecDetails.cpp",
+ ]
+
+LOCAL_INCLUDES += [
+ "/dom/media",
+ "/dom/media/mediasource",
+]
+
+FINAL_LIBRARY = "xul-gtest"
diff --git a/dom/media/mediasource/moz.build b/dom/media/mediasource/moz.build
new file mode 100644
index 0000000000..3fa98e42b7
--- /dev/null
+++ b/dom/media/mediasource/moz.build
@@ -0,0 +1,42 @@
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+MOCHITEST_MANIFESTS += ["test/mochitest.ini"]
+
+EXPORTS += [
+ "AsyncEventRunner.h",
+ "MediaSourceDecoder.h",
+ "MediaSourceDemuxer.h",
+ "SourceBufferAttributes.h",
+ "SourceBufferTask.h",
+ "TrackBuffersManager.h",
+]
+
+EXPORTS.mozilla.dom += [
+ "MediaSource.h",
+ "SourceBuffer.h",
+ "SourceBufferList.h",
+]
+
+UNIFIED_SOURCES += [
+ "ContainerParser.cpp",
+ "MediaSource.cpp",
+ "MediaSourceDecoder.cpp",
+ "MediaSourceDemuxer.cpp",
+ "MediaSourceUtils.cpp",
+ "ResourceQueue.cpp",
+ "SourceBuffer.cpp",
+ "SourceBufferList.cpp",
+ "SourceBufferResource.cpp",
+ "TrackBuffersManager.cpp",
+]
+
+TEST_DIRS += [
+ "gtest",
+]
+
+include("/ipc/chromium/chromium-config.mozbuild")
+
+FINAL_LIBRARY = "xul"
diff --git a/dom/media/mediasource/test/.eslintrc.js b/dom/media/mediasource/test/.eslintrc.js
new file mode 100644
index 0000000000..e283e384ba
--- /dev/null
+++ b/dom/media/mediasource/test/.eslintrc.js
@@ -0,0 +1,28 @@
+"use strict";
+
+module.exports = {
+ // Globals from mediasource.js. We use false to indicate they should not
+ // be overwritten in scripts.
+ globals: {
+ addMSEPrefs: false,
+ fetchAndLoad: false,
+ fetchAndLoadAsync: false,
+ fetchWithXHR: false,
+ logEvents: false,
+ loadSegment: false,
+ must_not_reject: false,
+ must_not_throw: false,
+ must_reject: false,
+ must_throw: false,
+ once: false,
+ range: false,
+ runWithMSE: false,
+ wait: false,
+ waitUntilTime: false,
+ },
+ // Use const/let instead of var for tighter scoping, avoiding redeclaration
+ rules: {
+ "no-var": "error",
+ "prefer-const": "error",
+ },
+};
diff --git a/dom/media/mediasource/test/1516754.webm b/dom/media/mediasource/test/1516754.webm
new file mode 100644
index 0000000000..05a008d906
--- /dev/null
+++ b/dom/media/mediasource/test/1516754.webm
Binary files differ
diff --git a/dom/media/mediasource/test/1516754.webm^headers^ b/dom/media/mediasource/test/1516754.webm^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/1516754.webm^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/aac20-48000-64000-1.m4s b/dom/media/mediasource/test/aac20-48000-64000-1.m4s
new file mode 100644
index 0000000000..56506e1f2d
--- /dev/null
+++ b/dom/media/mediasource/test/aac20-48000-64000-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/aac20-48000-64000-1.m4s^headers^ b/dom/media/mediasource/test/aac20-48000-64000-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/aac20-48000-64000-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/aac20-48000-64000-2.m4s b/dom/media/mediasource/test/aac20-48000-64000-2.m4s
new file mode 100644
index 0000000000..3faff17ebf
--- /dev/null
+++ b/dom/media/mediasource/test/aac20-48000-64000-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/aac20-48000-64000-2.m4s^headers^ b/dom/media/mediasource/test/aac20-48000-64000-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/aac20-48000-64000-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/aac20-48000-64000-init.mp4 b/dom/media/mediasource/test/aac20-48000-64000-init.mp4
new file mode 100644
index 0000000000..b70e016512
--- /dev/null
+++ b/dom/media/mediasource/test/aac20-48000-64000-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/aac20-48000-64000-init.mp4^headers^ b/dom/media/mediasource/test/aac20-48000-64000-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/aac20-48000-64000-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/aac51-48000-128000-1.m4s b/dom/media/mediasource/test/aac51-48000-128000-1.m4s
new file mode 100644
index 0000000000..3424acfecc
--- /dev/null
+++ b/dom/media/mediasource/test/aac51-48000-128000-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/aac51-48000-128000-1.m4s^headers^ b/dom/media/mediasource/test/aac51-48000-128000-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/aac51-48000-128000-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/aac51-48000-128000-2.m4s b/dom/media/mediasource/test/aac51-48000-128000-2.m4s
new file mode 100644
index 0000000000..b02bfd043d
--- /dev/null
+++ b/dom/media/mediasource/test/aac51-48000-128000-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/aac51-48000-128000-2.m4s^headers^ b/dom/media/mediasource/test/aac51-48000-128000-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/aac51-48000-128000-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/aac51-48000-128000-init.mp4 b/dom/media/mediasource/test/aac51-48000-128000-init.mp4
new file mode 100644
index 0000000000..7d62401f28
--- /dev/null
+++ b/dom/media/mediasource/test/aac51-48000-128000-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/aac51-48000-128000-init.mp4^headers^ b/dom/media/mediasource/test/aac51-48000-128000-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/aac51-48000-128000-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/avc3/init.mp4 b/dom/media/mediasource/test/avc3/init.mp4
new file mode 100644
index 0000000000..12fc38bd20
--- /dev/null
+++ b/dom/media/mediasource/test/avc3/init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/avc3/init.mp4^headers^ b/dom/media/mediasource/test/avc3/init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/avc3/init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/avc3/segment1.m4s b/dom/media/mediasource/test/avc3/segment1.m4s
new file mode 100644
index 0000000000..d95a6adf02
--- /dev/null
+++ b/dom/media/mediasource/test/avc3/segment1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/avc3/segment1.m4s^headers^ b/dom/media/mediasource/test/avc3/segment1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/avc3/segment1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop1.m4s b/dom/media/mediasource/test/bipbop/bipbop1.m4s
new file mode 100644
index 0000000000..a237f2e91e
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop10.m4s b/dom/media/mediasource/test/bipbop/bipbop10.m4s
new file mode 100644
index 0000000000..d1f5e6a0b0
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop10.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop10.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop10.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop10.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop11.m4s b/dom/media/mediasource/test/bipbop/bipbop11.m4s
new file mode 100644
index 0000000000..57232fb359
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop11.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop11.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop11.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop11.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop12.m4s b/dom/media/mediasource/test/bipbop/bipbop12.m4s
new file mode 100644
index 0000000000..f9b18713ee
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop12.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop12.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop12.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop12.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop13.m4s b/dom/media/mediasource/test/bipbop/bipbop13.m4s
new file mode 100644
index 0000000000..f2a876946c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop13.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop13.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop13.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop13.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop2.m4s b/dom/media/mediasource/test/bipbop/bipbop2.m4s
new file mode 100644
index 0000000000..baa0d8578c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop2s.mp4 b/dom/media/mediasource/test/bipbop/bipbop2s.mp4
new file mode 100644
index 0000000000..4fd8b9cb6e
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop2s.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop2s.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop2s.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop2s.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop3.m4s b/dom/media/mediasource/test/bipbop/bipbop3.m4s
new file mode 100644
index 0000000000..ed313e668c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop3.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop3.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop3.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop3.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop4.m4s b/dom/media/mediasource/test/bipbop/bipbop4.m4s
new file mode 100644
index 0000000000..7709ac08c5
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop4.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop4.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop4.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop4.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop5.m4s b/dom/media/mediasource/test/bipbop/bipbop5.m4s
new file mode 100644
index 0000000000..6d36788e44
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop5.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop5.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop5.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop5.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop6.m4s b/dom/media/mediasource/test/bipbop/bipbop6.m4s
new file mode 100644
index 0000000000..64f475c700
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop6.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop6.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop6.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop6.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop7.m4s b/dom/media/mediasource/test/bipbop/bipbop7.m4s
new file mode 100644
index 0000000000..c148918d6d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop7.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop7.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop7.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop7.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop8.m4s b/dom/media/mediasource/test/bipbop/bipbop8.m4s
new file mode 100644
index 0000000000..707dd48485
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop8.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop8.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop8.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop8.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop9.m4s b/dom/media/mediasource/test/bipbop/bipbop9.m4s
new file mode 100644
index 0000000000..538cf72a4d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop9.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop9.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop9.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop9.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_300-3s.webm b/dom/media/mediasource/test/bipbop/bipbop_300-3s.webm
new file mode 100644
index 0000000000..db578dc96c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_300-3s.webm
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_300-3s.webm^headers^ b/dom/media/mediasource/test/bipbop/bipbop_300-3s.webm^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_300-3s.webm^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video1.m4s b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video1.m4s
new file mode 100644
index 0000000000..3dad336e8e
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video2.m4s b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video2.m4s
new file mode 100644
index 0000000000..dd7491241f
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-video2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-videoinit.mp4 b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-videoinit.mp4
new file mode 100644
index 0000000000..b1a2d44058
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-videoinit.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-videoinit.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-videoinit.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_480_624kbps-videoinit.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio1.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio1.m4s
new file mode 100644
index 0000000000..33da98b5a9
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio10.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio10.m4s
new file mode 100644
index 0000000000..36a98afd29
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio10.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio10.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio10.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio10.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio11.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio11.m4s
new file mode 100644
index 0000000000..23d4aa8d86
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio11.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio11.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio11.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio11.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio2.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio2.m4s
new file mode 100644
index 0000000000..96f4bcc344
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio3.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio3.m4s
new file mode 100644
index 0000000000..7de4bd0ca1
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio3.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio3.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio3.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio3.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio4.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio4.m4s
new file mode 100644
index 0000000000..494c71eb92
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio4.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio4.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio4.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio4.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio5.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio5.m4s
new file mode 100644
index 0000000000..b50496b6ce
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio5.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio5.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio5.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio5.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio6.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio6.m4s
new file mode 100644
index 0000000000..02cf4d363c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio6.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio6.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio6.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio6.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio7.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio7.m4s
new file mode 100644
index 0000000000..bb2252889f
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio7.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio7.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio7.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio7.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio8.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio8.m4s
new file mode 100644
index 0000000000..04a6a7af91
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio8.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio8.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio8.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio8.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio9.m4s b/dom/media/mediasource/test/bipbop/bipbop_audio9.m4s
new file mode 100644
index 0000000000..cb94b529a7
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio9.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audio9.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audio9.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audio9.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audioinit.mp4 b/dom/media/mediasource/test/bipbop/bipbop_audioinit.mp4
new file mode 100644
index 0000000000..bbf272197d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audioinit.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_audioinit.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_audioinit.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_audioinit.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_dash.mpd b/dom/media/mediasource/test/bipbop/bipbop_dash.mpd
new file mode 100644
index 0000000000..532cdc65d5
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_dash.mpd
@@ -0,0 +1,48 @@
+<?xml version="1.0"?>
+<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" minBufferTime="PT1.500000S" type="static" mediaPresentationDuration="PT0H0M9.98S" profiles="urn:mpeg:dash:profile:full:2011">
+ <ProgramInformation moreInformationURL="http://gpac.sourceforge.net">
+ <Title>bipbop_dash.mpd handcrafted by JYA</Title>
+ </ProgramInformation>
+
+ <Period duration="PT0H0M9.98S">
+ <AdaptationSet segmentAlignment="true" maxWidth="400" maxHeight="300" maxFrameRate="90000" par="4:3" lang="und">
+ <Representation id="1" mimeType="video/mp4" codecs="avc1.4d4015" width="400" height="300" frameRate="90000" sar="1:1" startWithSAP="1" bandwidth="226425">
+ <SegmentList timescale="90000" duration="69043">
+ <Initialization sourceURL="bipbop_videoinit.mp4"/>
+ <SegmentURL media="bipbop_video1.m4s"/>
+ <SegmentURL media="bipbop_video2.m4s"/>
+ <SegmentURL media="bipbop_video3.m4s"/>
+ <SegmentURL media="bipbop_video4.m4s"/>
+ <SegmentURL media="bipbop_video5.m4s"/>
+ <SegmentURL media="bipbop_video6.m4s"/>
+ <SegmentURL media="bipbop_video7.m4s"/>
+ <SegmentURL media="bipbop_video8.m4s"/>
+ <SegmentURL media="bipbop_video9.m4s"/>
+ <SegmentURL media="bipbop_video10.m4s"/>
+ <SegmentURL media="bipbop_video11.m4s"/>
+ <SegmentURL media="bipbop_video12.m4s"/>
+ <SegmentURL media="bipbop_video13.m4s"/>
+ </SegmentList>
+ </Representation>
+ </AdaptationSet>
+ <AdaptationSet segmentAlignment="true" lang="und">
+ <Representation id="1" mimeType="audio/mp4" codecs="mp4a.40.2" audioSamplingRate="22050" startWithSAP="1" bandwidth="7206">
+ <AudioChannelConfiguration schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011" value="2"/>
+ <SegmentList timescale="22050" duration="20101">
+ <Initialization sourceURL="bipbop_audioinit.mp4"/>
+ <SegmentURL media="bipbop_audio1.m4s"/>
+ <SegmentURL media="bipbop_audio2.m4s"/>
+ <SegmentURL media="bipbop_audio3.m4s"/>
+ <SegmentURL media="bipbop_audio4.m4s"/>
+ <SegmentURL media="bipbop_audio5.m4s"/>
+ <SegmentURL media="bipbop_audio6.m4s"/>
+ <SegmentURL media="bipbop_audio7.m4s"/>
+ <SegmentURL media="bipbop_audio8.m4s"/>
+ <SegmentURL media="bipbop_audio9.m4s"/>
+ <SegmentURL media="bipbop_audio10.m4s"/>
+ <SegmentURL media="bipbop_audio11.m4s"/>
+ </SegmentList>
+ </Representation>
+ </AdaptationSet>
+ </Period>
+</MPD>
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-1.m4s
new file mode 100644
index 0000000000..2b95d49de9
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-2.m4s
new file mode 100644
index 0000000000..3d95e7e2bf
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-init.mp4
new file mode 100644
index 0000000000..cc7a48b5ce
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.0-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-1.m4s
new file mode 100644
index 0000000000..d67c4ef4cc
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-2.m4s
new file mode 100644
index 0000000000..be155dbb9c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-init.mp4
new file mode 100644
index 0000000000..b67beb9548
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.1-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-1.m4s
new file mode 100644
index 0000000000..2056aaec7f
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-2.m4s
new file mode 100644
index 0000000000..ccdad15c39
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-init.mp4
new file mode 100644
index 0000000000..5b618c64d8
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.2-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-1.m4s
new file mode 100644
index 0000000000..c834ea6ae8
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-2.m4s
new file mode 100644
index 0000000000..aad6b355ae
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-init.mp4
new file mode 100644
index 0000000000..1f878bc84b
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.3-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-1.m4s
new file mode 100644
index 0000000000..88f05ee8bb
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-2.m4s
new file mode 100644
index 0000000000..23ecab42e2
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-init.mp4
new file mode 100644
index 0000000000..3e5ad8ad3b
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.4-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-1.m4s
new file mode 100644
index 0000000000..df05700d87
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-2.m4s
new file mode 100644
index 0000000000..14daa425c7
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-init.mp4
new file mode 100644
index 0000000000..2101dd876c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.5-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-1.m4s
new file mode 100644
index 0000000000..ef0a4614fc
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-2.m4s
new file mode 100644
index 0000000000..8f7c819867
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-init.mp4
new file mode 100644
index 0000000000..91f48ab6a1
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.6-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-1.m4s
new file mode 100644
index 0000000000..dded8a37af
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-2.m4s
new file mode 100644
index 0000000000..2a3c10859c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-init.mp4
new file mode 100644
index 0000000000..cf45610f7b
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.7-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-1.m4s
new file mode 100644
index 0000000000..74f0addd4f
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-2.m4s
new file mode 100644
index 0000000000..f062c85333
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-init.mp4
new file mode 100644
index 0000000000..30a0ab0fed
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.8-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-1.m4s
new file mode 100644
index 0000000000..b74ebf1f64
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-2.m4s
new file mode 100644
index 0000000000..eabd8a3411
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-init.mp4
new file mode 100644
index 0000000000..449722b0fd
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_0.9-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-1.m4s
new file mode 100644
index 0000000000..e032afcc4f
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-2.m4s
new file mode 100644
index 0000000000..6542c8d3d3
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-init.mp4
new file mode 100644
index 0000000000..0a9da048f0
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.0-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-1.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-1.m4s
new file mode 100644
index 0000000000..1b8b22be4a
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-2.m4s b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-2.m4s
new file mode 100644
index 0000000000..3de855982f
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-init.mp4 b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-init.mp4
new file mode 100644
index 0000000000..80b3814f7c
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-init.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-init.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-init.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_offset_1.1-init.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_trailing_skip_box_video1.m4s b/dom/media/mediasource/test/bipbop/bipbop_trailing_skip_box_video1.m4s
new file mode 100644
index 0000000000..fa5d454277
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_trailing_skip_box_video1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_trailing_skip_box_video1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_trailing_skip_box_video1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_trailing_skip_box_video1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video1.m4s b/dom/media/mediasource/test/bipbop/bipbop_video1.m4s
new file mode 100644
index 0000000000..9291182516
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video1.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video1.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video1.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video1.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video10.m4s b/dom/media/mediasource/test/bipbop/bipbop_video10.m4s
new file mode 100644
index 0000000000..72c7afaca7
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video10.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video10.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video10.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video10.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video11.m4s b/dom/media/mediasource/test/bipbop/bipbop_video11.m4s
new file mode 100644
index 0000000000..e6109f5e71
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video11.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video11.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video11.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video11.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video12.m4s b/dom/media/mediasource/test/bipbop/bipbop_video12.m4s
new file mode 100644
index 0000000000..5c54a510f7
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video12.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video12.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video12.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video12.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video13.m4s b/dom/media/mediasource/test/bipbop/bipbop_video13.m4s
new file mode 100644
index 0000000000..c64f38a337
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video13.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video13.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video13.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video13.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video2.m4s b/dom/media/mediasource/test/bipbop/bipbop_video2.m4s
new file mode 100644
index 0000000000..cd34fae561
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video2.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video2.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video2.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video2.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video3.m4s b/dom/media/mediasource/test/bipbop/bipbop_video3.m4s
new file mode 100644
index 0000000000..5a13340043
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video3.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video3.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video3.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video3.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video4.m4s b/dom/media/mediasource/test/bipbop/bipbop_video4.m4s
new file mode 100644
index 0000000000..e8d96b6ed1
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video4.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video4.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video4.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video4.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video5.m4s b/dom/media/mediasource/test/bipbop/bipbop_video5.m4s
new file mode 100644
index 0000000000..ca6a820468
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video5.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video5.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video5.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video5.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video6.m4s b/dom/media/mediasource/test/bipbop/bipbop_video6.m4s
new file mode 100644
index 0000000000..fe9824355b
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video6.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video6.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video6.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video6.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video7.m4s b/dom/media/mediasource/test/bipbop/bipbop_video7.m4s
new file mode 100644
index 0000000000..3351fa6859
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video7.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video7.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video7.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video7.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video8.m4s b/dom/media/mediasource/test/bipbop/bipbop_video8.m4s
new file mode 100644
index 0000000000..af26ae5f9e
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video8.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video8.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video8.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video8.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video9.m4s b/dom/media/mediasource/test/bipbop/bipbop_video9.m4s
new file mode 100644
index 0000000000..25be672c15
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video9.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_video9.m4s^headers^ b/dom/media/mediasource/test/bipbop/bipbop_video9.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_video9.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbop_videoinit.mp4 b/dom/media/mediasource/test/bipbop/bipbop_videoinit.mp4
new file mode 100644
index 0000000000..7c9c533c36
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_videoinit.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbop_videoinit.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbop_videoinit.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbop_videoinit.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bipbop/bipbopinit.mp4 b/dom/media/mediasource/test/bipbop/bipbopinit.mp4
new file mode 100644
index 0000000000..39f0575a71
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbopinit.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bipbop/bipbopinit.mp4^headers^ b/dom/media/mediasource/test/bipbop/bipbopinit.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/bipbop/bipbopinit.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/bug1718709_high_res.mp4 b/dom/media/mediasource/test/bug1718709_high_res.mp4
new file mode 100644
index 0000000000..3f211d2370
--- /dev/null
+++ b/dom/media/mediasource/test/bug1718709_high_res.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/bug1718709_low_res.mp4 b/dom/media/mediasource/test/bug1718709_low_res.mp4
new file mode 100644
index 0000000000..dc5cd6b7f1
--- /dev/null
+++ b/dom/media/mediasource/test/bug1718709_low_res.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/crashtests/1005366.html b/dom/media/mediasource/test/crashtests/1005366.html
new file mode 100644
index 0000000000..aa8b7f652e
--- /dev/null
+++ b/dom/media/mediasource/test/crashtests/1005366.html
@@ -0,0 +1,27 @@
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset="UTF-8">
+<script>
+
+/*
+user_pref("media.mediasource.enabled", true);
+*/
+
+function boom()
+{
+ var source = new window.MediaSource();
+ var videoElement = document.createElementNS('http://www.w3.org/1999/xhtml', 'video');
+ videoElement.src = URL.createObjectURL(source);
+
+ setTimeout(function() {
+ var buf = source.addSourceBuffer("video/webm");
+ buf.abort();
+ buf.appendBuffer(new Float32Array(203));
+ }, 0);
+}
+
+</script>
+</head>
+<body onload="boom();"></body>
+</html>
diff --git a/dom/media/mediasource/test/crashtests/1059035.html b/dom/media/mediasource/test/crashtests/1059035.html
new file mode 100644
index 0000000000..9dfda34b82
--- /dev/null
+++ b/dom/media/mediasource/test/crashtests/1059035.html
@@ -0,0 +1,26 @@
+<!DOCTYPE html>
+<html>
+<head>
+<script>
+
+/*
+user_pref("media.mediasource.enabled", true);
+*/
+
+function boom()
+{
+ var mediaSource = new MediaSource();
+ var htmlAudio = document.createElement("audio");
+ htmlAudio.src = URL.createObjectURL(mediaSource);
+
+ setTimeout(function() {
+ var sourceBuffer = mediaSource.addSourceBuffer("video/webm");
+ mediaSource.removeSourceBuffer(sourceBuffer);
+ sourceBuffer.remove(0, 0);
+ }, 0);
+}
+
+</script>
+</head>
+<body onload="boom();"></body>
+</html>
diff --git a/dom/media/mediasource/test/crashtests/926665.html b/dom/media/mediasource/test/crashtests/926665.html
new file mode 100644
index 0000000000..ccac5a7644
--- /dev/null
+++ b/dom/media/mediasource/test/crashtests/926665.html
@@ -0,0 +1,26 @@
+<html>
+<head>
+<meta charset="UTF-8">
+<script style="display: none;" id="fuzz1" type="text/javascript">
+
+function boom()
+{
+ var mediaSource = new window.MediaSource();
+ var mediaSourceURL = URL.createObjectURL(mediaSource);
+ var v1 = document.createElement('video');
+ v1.src = mediaSourceURL;
+ mediaSource.addEventListener("sourceopen", function (e) {
+ var v2 = document.createElement('video');
+ v2.src = mediaSourceURL;
+ setTimeout(function () {
+ v2.src = "data:text/plain,1";
+ v1.src = "data:text/plain,2";
+ }, 0);
+ });
+}
+
+</script>
+</head>
+
+<body onload="boom();"></body>
+</html>
diff --git a/dom/media/mediasource/test/crashtests/931388.html b/dom/media/mediasource/test/crashtests/931388.html
new file mode 100644
index 0000000000..cdb5bd9add
--- /dev/null
+++ b/dom/media/mediasource/test/crashtests/931388.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset="UTF-8">
+<script>
+
+function boom()
+{
+ var v = document.createElement('video');
+ v.src = URL.createObjectURL(new MediaSource());
+ v.play();
+}
+
+</script>
+</head>
+<body onload="boom();"></body>
+</html>
diff --git a/dom/media/mediasource/test/crashtests/crashtests.list b/dom/media/mediasource/test/crashtests/crashtests.list
new file mode 100644
index 0000000000..e16ec261d2
--- /dev/null
+++ b/dom/media/mediasource/test/crashtests/crashtests.list
@@ -0,0 +1,4 @@
+test-pref(media.mediasource.enabled,true) load 926665.html
+test-pref(media.mediasource.enabled,true) load 931388.html
+test-pref(media.mediasource.enabled,true) load 1005366.html
+test-pref(media.mediasource.enabled,true) load 1059035.html
diff --git a/dom/media/mediasource/test/flac/00001.m4s b/dom/media/mediasource/test/flac/00001.m4s
new file mode 100644
index 0000000000..02745ba2f8
--- /dev/null
+++ b/dom/media/mediasource/test/flac/00001.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/flac/00001.m4s^headers^ b/dom/media/mediasource/test/flac/00001.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/flac/00001.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/flac/00002.m4s b/dom/media/mediasource/test/flac/00002.m4s
new file mode 100644
index 0000000000..cd6b1f5949
--- /dev/null
+++ b/dom/media/mediasource/test/flac/00002.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/flac/00002.m4s^headers^ b/dom/media/mediasource/test/flac/00002.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/flac/00002.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/flac/00003.m4s b/dom/media/mediasource/test/flac/00003.m4s
new file mode 100644
index 0000000000..c5b78e1ce0
--- /dev/null
+++ b/dom/media/mediasource/test/flac/00003.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/flac/00003.m4s^headers^ b/dom/media/mediasource/test/flac/00003.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/flac/00003.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/flac/IS.mp4 b/dom/media/mediasource/test/flac/IS.mp4
new file mode 100644
index 0000000000..7f108602fd
--- /dev/null
+++ b/dom/media/mediasource/test/flac/IS.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/flac/IS.mp4^headers^ b/dom/media/mediasource/test/flac/IS.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/flac/IS.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/init-trackid2.mp4 b/dom/media/mediasource/test/init-trackid2.mp4
new file mode 100644
index 0000000000..c96da9d4df
--- /dev/null
+++ b/dom/media/mediasource/test/init-trackid2.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/init-trackid2.mp4^headers^ b/dom/media/mediasource/test/init-trackid2.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/init-trackid2.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/init-trackid3.mp4 b/dom/media/mediasource/test/init-trackid3.mp4
new file mode 100644
index 0000000000..e37d8ea098
--- /dev/null
+++ b/dom/media/mediasource/test/init-trackid3.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/init-trackid3.mp4^headers^ b/dom/media/mediasource/test/init-trackid3.mp4^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/init-trackid3.mp4^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/mediasource.js b/dom/media/mediasource/test/mediasource.js
new file mode 100644
index 0000000000..71d8d4ef9f
--- /dev/null
+++ b/dom/media/mediasource/test/mediasource.js
@@ -0,0 +1,235 @@
+// Helpers for Media Source Extensions tests
+
+let gMSETestPrefs = [
+ ["media.mediasource.enabled", true],
+ ["media.audio-max-decode-error", 0],
+ ["media.video-max-decode-error", 0],
+];
+
+// Called before runWithMSE() to set the prefs before running MSE tests.
+function addMSEPrefs(...prefs) {
+ gMSETestPrefs = gMSETestPrefs.concat(prefs);
+}
+
+async function runWithMSE(testFunction) {
+ await once(window, "load");
+ await SpecialPowers.pushPrefEnv({ set: gMSETestPrefs });
+
+ const ms = new MediaSource();
+
+ const el = document.createElement("video");
+ el.src = URL.createObjectURL(ms);
+ el.preload = "auto";
+
+ document.body.appendChild(el);
+ SimpleTest.registerCleanupFunction(() => {
+ el.remove();
+ el.removeAttribute("src");
+ el.load();
+ });
+ try {
+ await testFunction(ms, el);
+ } catch (e) {
+ ok(false, `${testFunction.name} failed with error ${e.name}`);
+ throw e;
+ }
+}
+
+async function fetchWithXHR(uri) {
+ return new Promise(resolve => {
+ const xhr = new XMLHttpRequest();
+ xhr.open("GET", uri, true);
+ xhr.responseType = "arraybuffer";
+ xhr.addEventListener("load", function () {
+ is(
+ xhr.status,
+ 200,
+ "fetchWithXHR load uri='" + uri + "' status=" + xhr.status
+ );
+ resolve(xhr.response);
+ });
+ xhr.send();
+ });
+}
+
+function range(start, end) {
+ const rv = [];
+ for (let i = start; i < end; ++i) {
+ rv.push(i);
+ }
+ return rv;
+}
+
+function must_throw(f, msg, error = true) {
+ try {
+ f();
+ ok(!error, msg);
+ } catch (e) {
+ ok(error, msg);
+ if (error === true) {
+ ok(
+ false,
+ `Please provide name of expected error! Got ${e.name}: ${e.message}.`
+ );
+ } else if (e.name != error) {
+ throw e;
+ }
+ }
+}
+
+async function must_reject(f, msg, error = true) {
+ try {
+ await f();
+ ok(!error, msg);
+ } catch (e) {
+ ok(error, msg);
+ if (error === true) {
+ ok(
+ false,
+ `Please provide name of expected error! Got ${e.name}: ${e.message}.`
+ );
+ } else if (e.name != error) {
+ throw e;
+ }
+ }
+}
+
+const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
+
+const must_not_throw = (f, msg) => must_throw(f, msg, false);
+const must_not_reject = (f, msg) => must_reject(f, msg, false);
+
+async function once(target, name) {
+ return new Promise(r => target.addEventListener(name, r, { once: true }));
+}
+
+function timeRangeToString(r) {
+ let str = "TimeRanges: ";
+ for (let i = 0; i < r.length; i++) {
+ str += "[" + r.start(i) + ", " + r.end(i) + ")";
+ }
+ return str;
+}
+
+async function loadSegment(sb, typedArrayOrArrayBuffer) {
+ const typedArray =
+ typedArrayOrArrayBuffer instanceof ArrayBuffer
+ ? new Uint8Array(typedArrayOrArrayBuffer)
+ : typedArrayOrArrayBuffer;
+ info(
+ `Loading buffer: [${typedArray.byteOffset}, ${
+ typedArray.byteOffset + typedArray.byteLength
+ })`
+ );
+ const beforeBuffered = timeRangeToString(sb.buffered);
+ const p = once(sb, "update");
+ sb.appendBuffer(typedArray);
+ await p;
+ const afterBuffered = timeRangeToString(sb.buffered);
+ info(
+ `SourceBuffer buffered ranges grew from ${beforeBuffered} to ${afterBuffered}`
+ );
+}
+
+async function fetchAndLoad(sb, prefix, chunks, suffix) {
+ // Fetch the buffers in parallel.
+ const buffers = await Promise.all(
+ chunks.map(c => fetchWithXHR(prefix + c + suffix))
+ );
+
+ // Load them in series, as required per spec.
+ for (const buffer of buffers) {
+ await loadSegment(sb, buffer);
+ }
+}
+
+function loadSegmentAsync(sb, typedArrayOrArrayBuffer) {
+ const typedArray =
+ typedArrayOrArrayBuffer instanceof ArrayBuffer
+ ? new Uint8Array(typedArrayOrArrayBuffer)
+ : typedArrayOrArrayBuffer;
+ info(
+ `Loading buffer2: [${typedArray.byteOffset}, ${
+ typedArray.byteOffset + typedArray.byteLength
+ })`
+ );
+ const beforeBuffered = timeRangeToString(sb.buffered);
+ return sb.appendBufferAsync(typedArray).then(() => {
+ const afterBuffered = timeRangeToString(sb.buffered);
+ info(
+ `SourceBuffer buffered ranges grew from ${beforeBuffered} to ${afterBuffered}`
+ );
+ });
+}
+
+function fetchAndLoadAsync(sb, prefix, chunks, suffix) {
+ // Fetch the buffers in parallel.
+ const buffers = {};
+ const fetches = [];
+ for (const chunk of chunks) {
+ fetches.push(
+ fetchWithXHR(prefix + chunk + suffix).then(
+ ((c, x) => (buffers[c] = x)).bind(null, chunk)
+ )
+ );
+ }
+
+ // Load them in series, as required per spec.
+ return Promise.all(fetches).then(function () {
+ let rv = Promise.resolve();
+ for (const chunk of chunks) {
+ rv = rv.then(loadSegmentAsync.bind(null, sb, buffers[chunk]));
+ }
+ return rv;
+ });
+}
+
+// Register timeout function to dump debugging logs.
+SimpleTest.registerTimeoutFunction(async function () {
+ for (const v of document.getElementsByTagName("video")) {
+ console.log(await SpecialPowers.wrap(v).mozRequestDebugInfo());
+ }
+ for (const a of document.getElementsByTagName("audio")) {
+ console.log(await SpecialPowers.wrap(a).mozRequestDebugInfo());
+ }
+});
+
+async function waitUntilTime(target, targetTime) {
+ await new Promise(resolve => {
+ target.addEventListener("waiting", function onwaiting() {
+ info("Got a waiting event at " + target.currentTime);
+ if (target.currentTime >= targetTime) {
+ target.removeEventListener("waiting", onwaiting);
+ resolve();
+ }
+ });
+ });
+ ok(true, "Reached target time of: " + targetTime);
+}
+
+// Log events for debugging.
+
+function logEvents(el) {
+ [
+ "suspend",
+ "play",
+ "canplay",
+ "canplaythrough",
+ "loadstart",
+ "loadedmetadata",
+ "loadeddata",
+ "playing",
+ "ended",
+ "error",
+ "stalled",
+ "emptied",
+ "abort",
+ "waiting",
+ "pause",
+ "durationchange",
+ "seeking",
+ "seeked",
+ ].forEach(type =>
+ el.addEventListener(type, e => info(`got ${e.type} event`))
+ );
+}
diff --git a/dom/media/mediasource/test/mochitest.ini b/dom/media/mediasource/test/mochitest.ini
new file mode 100644
index 0000000000..f231255c1e
--- /dev/null
+++ b/dom/media/mediasource/test/mochitest.ini
@@ -0,0 +1,213 @@
+[DEFAULT]
+subsuite = media
+support-files =
+ mediasource.js
+ seek.webm seek.webm^headers^
+ seek_lowres.webm seek_lowres.webm^headers^
+ bipbop/bipbop_300-3s.webm bipbop/bipbop_300-3s.webm^headers^
+ bipbop/bipbop2s.mp4 bipbop/bipbop2s.mp4^headers^
+ bipbop/bipbop_trailing_skip_box_video1.m4s
+ bipbop/bipbop_trailing_skip_box_video1.m4s^headers^
+ bipbop/bipbopinit.mp4 bipbop/bipbop_audioinit.mp4 bipbop/bipbop_videoinit.mp4
+ bipbop/bipbop1.m4s bipbop/bipbop_audio1.m4s bipbop/bipbop_video1.m4s
+ bipbop/bipbop2.m4s bipbop/bipbop_audio2.m4s bipbop/bipbop_video2.m4s
+ bipbop/bipbop3.m4s bipbop/bipbop_audio3.m4s bipbop/bipbop_video3.m4s
+ bipbop/bipbop4.m4s bipbop/bipbop_audio4.m4s bipbop/bipbop_video4.m4s
+ bipbop/bipbop5.m4s bipbop/bipbop_audio5.m4s bipbop/bipbop_video5.m4s
+ bipbop/bipbop6.m4s bipbop/bipbop_audio6.m4s bipbop/bipbop_video6.m4s
+ bipbop/bipbop7.m4s bipbop/bipbop_audio7.m4s bipbop/bipbop_video7.m4s
+ bipbop/bipbop8.m4s bipbop/bipbop_audio8.m4s bipbop/bipbop_video8.m4s
+ bipbop/bipbop9.m4s bipbop/bipbop_audio9.m4s bipbop/bipbop_video9.m4s
+ bipbop/bipbop10.m4s bipbop/bipbop_audio10.m4s bipbop/bipbop_video10.m4s
+ bipbop/bipbop11.m4s bipbop/bipbop_audio11.m4s bipbop/bipbop_video11.m4s
+ bipbop/bipbop12.m4s bipbop/bipbop_video12.m4s
+ bipbop/bipbop13.m4s bipbop/bipbop_video13.m4s
+ bipbop/bipbopinit.mp4^headers^ bipbop/bipbop_audioinit.mp4^headers^ bipbop/bipbop_videoinit.mp4^headers^
+ bipbop/bipbop1.m4s^headers^ bipbop/bipbop_audio1.m4s^headers^ bipbop/bipbop_video1.m4s^headers^
+ bipbop/bipbop2.m4s^headers^ bipbop/bipbop_audio2.m4s^headers^ bipbop/bipbop_video2.m4s^headers^
+ bipbop/bipbop3.m4s^headers^ bipbop/bipbop_audio3.m4s^headers^ bipbop/bipbop_video3.m4s^headers^
+ bipbop/bipbop4.m4s^headers^ bipbop/bipbop_audio4.m4s^headers^ bipbop/bipbop_video4.m4s^headers^
+ bipbop/bipbop5.m4s^headers^ bipbop/bipbop_audio5.m4s^headers^ bipbop/bipbop_video5.m4s^headers^
+ bipbop/bipbop6.m4s^headers^ bipbop/bipbop_audio6.m4s^headers^ bipbop/bipbop_video6.m4s^headers^
+ bipbop/bipbop7.m4s^headers^ bipbop/bipbop_audio7.m4s^headers^ bipbop/bipbop_video7.m4s^headers^
+ bipbop/bipbop8.m4s^headers^ bipbop/bipbop_audio8.m4s^headers^ bipbop/bipbop_video8.m4s^headers^
+ bipbop/bipbop9.m4s^headers^ bipbop/bipbop_audio9.m4s^headers^ bipbop/bipbop_video9.m4s^headers^
+ bipbop/bipbop10.m4s^headers^ bipbop/bipbop_audio10.m4s^headers^ bipbop/bipbop_video10.m4s^headers^
+ bipbop/bipbop11.m4s^headers^ bipbop/bipbop_audio11.m4s^headers^ bipbop/bipbop_video11.m4s^headers^
+ bipbop/bipbop12.m4s^headers^ bipbop/bipbop_video12.m4s^headers^
+ bipbop/bipbop13.m4s^headers^ bipbop/bipbop_video13.m4s^headers^
+ bipbop/bipbop_offset_0.0-1.m4s
+ bipbop/bipbop_offset_0.0-1.m4s^headers^
+ bipbop/bipbop_offset_0.0-2.m4s
+ bipbop/bipbop_offset_0.0-2.m4s^headers^
+ bipbop/bipbop_offset_0.0-init.mp4
+ bipbop/bipbop_offset_0.0-init.mp4^headers^
+ bipbop/bipbop_offset_0.1-1.m4s
+ bipbop/bipbop_offset_0.1-1.m4s^headers^
+ bipbop/bipbop_offset_0.1-2.m4s
+ bipbop/bipbop_offset_0.1-2.m4s^headers^
+ bipbop/bipbop_offset_0.1-init.mp4
+ bipbop/bipbop_offset_0.1-init.mp4^headers^
+ bipbop/bipbop_offset_0.2-1.m4s
+ bipbop/bipbop_offset_0.2-1.m4s^headers^
+ bipbop/bipbop_offset_0.2-2.m4s
+ bipbop/bipbop_offset_0.2-2.m4s^headers^
+ bipbop/bipbop_offset_0.2-init.mp4
+ bipbop/bipbop_offset_0.2-init.mp4^headers^
+ bipbop/bipbop_offset_0.3-1.m4s
+ bipbop/bipbop_offset_0.3-1.m4s^headers^
+ bipbop/bipbop_offset_0.3-2.m4s
+ bipbop/bipbop_offset_0.3-2.m4s^headers^
+ bipbop/bipbop_offset_0.3-init.mp4
+ bipbop/bipbop_offset_0.3-init.mp4^headers^
+ bipbop/bipbop_offset_0.4-1.m4s
+ bipbop/bipbop_offset_0.4-1.m4s^headers^
+ bipbop/bipbop_offset_0.4-2.m4s
+ bipbop/bipbop_offset_0.4-2.m4s^headers^
+ bipbop/bipbop_offset_0.4-init.mp4
+ bipbop/bipbop_offset_0.4-init.mp4^headers^
+ bipbop/bipbop_offset_0.5-1.m4s
+ bipbop/bipbop_offset_0.5-1.m4s^headers^
+ bipbop/bipbop_offset_0.5-2.m4s
+ bipbop/bipbop_offset_0.5-2.m4s^headers^
+ bipbop/bipbop_offset_0.5-init.mp4
+ bipbop/bipbop_offset_0.5-init.mp4^headers^
+ bipbop/bipbop_offset_0.6-1.m4s
+ bipbop/bipbop_offset_0.6-1.m4s^headers^
+ bipbop/bipbop_offset_0.6-2.m4s
+ bipbop/bipbop_offset_0.6-2.m4s^headers^
+ bipbop/bipbop_offset_0.6-init.mp4
+ bipbop/bipbop_offset_0.6-init.mp4^headers^
+ bipbop/bipbop_offset_0.7-1.m4s
+ bipbop/bipbop_offset_0.7-1.m4s^headers^
+ bipbop/bipbop_offset_0.7-2.m4s
+ bipbop/bipbop_offset_0.7-2.m4s^headers^
+ bipbop/bipbop_offset_0.7-init.mp4
+ bipbop/bipbop_offset_0.7-init.mp4^headers^
+ bipbop/bipbop_offset_0.8-1.m4s
+ bipbop/bipbop_offset_0.8-1.m4s^headers^
+ bipbop/bipbop_offset_0.8-2.m4s
+ bipbop/bipbop_offset_0.8-2.m4s^headers^
+ bipbop/bipbop_offset_0.8-init.mp4
+ bipbop/bipbop_offset_0.8-init.mp4^headers^
+ bipbop/bipbop_offset_0.9-1.m4s
+ bipbop/bipbop_offset_0.9-1.m4s^headers^
+ bipbop/bipbop_offset_0.9-2.m4s
+ bipbop/bipbop_offset_0.9-2.m4s^headers^
+ bipbop/bipbop_offset_0.9-init.mp4
+ bipbop/bipbop_offset_0.9-init.mp4^headers^
+ bipbop/bipbop_offset_1.0-1.m4s
+ bipbop/bipbop_offset_1.0-1.m4s^headers^
+ bipbop/bipbop_offset_1.0-2.m4s
+ bipbop/bipbop_offset_1.0-2.m4s^headers^
+ bipbop/bipbop_offset_1.0-init.mp4
+ bipbop/bipbop_offset_1.0-init.mp4^headers^
+ bipbop/bipbop_offset_1.1-1.m4s
+ bipbop/bipbop_offset_1.1-1.m4s^headers^
+ bipbop/bipbop_offset_1.1-2.m4s
+ bipbop/bipbop_offset_1.1-2.m4s^headers^
+ bipbop/bipbop_offset_1.1-init.mp4
+ bipbop/bipbop_offset_1.1-init.mp4^headers^
+ aac20-48000-64000-init.mp4 aac20-48000-64000-init.mp4^headers^
+ aac20-48000-64000-1.m4s aac20-48000-64000-1.m4s^headers^
+ aac20-48000-64000-2.m4s aac20-48000-64000-2.m4s^headers^
+ aac51-48000-128000-init.mp4 aac51-48000-128000-init.mp4^headers^
+ aac51-48000-128000-1.m4s aac51-48000-128000-1.m4s^headers^
+ aac51-48000-128000-2.m4s aac51-48000-128000-2.m4s^headers^
+ bipbop/bipbop_480_624kbps-videoinit.mp4 bipbop/bipbop_480_624kbps-videoinit.mp4^headers^
+ bipbop/bipbop_480_624kbps-video1.m4s bipbop/bipbop_480_624kbps-video1.m4s^headers^
+ bipbop/bipbop_480_624kbps-video2.m4s bipbop/bipbop_480_624kbps-video2.m4s^headers^
+ flac/IS.mp4 flac/IS.mp4^headers^ flac/00001.m4s flac/00001.m4s^headers^
+ flac/00002.m4s flac/00002.m4s^headers^ flac/00003.m4s flac/00003.m4s^headers^
+ avc3/init.mp4 avc3/init.mp4^headers^ avc3/segment1.m4s avc3/segment1.m4s^headers^
+ tags_before_cluster.webm
+ tags_before_cluster.webm^header^
+ 1516754.webm 1516754.webm^headers^
+ init-trackid2.mp4 init-trackid3.mp4 segment-2.0001.m4s segment-2.0002.m4s segment-3.0001.m4s segment-3.0002.m4s
+ init-trackid2.mp4^headers^ init-trackid3.mp4^headers^ segment-2.0001.m4s^headers^ segment-2.0002.m4s^headers^
+ segment-3.0001.m4s^headers^ segment-3.0002.m4s^headers^
+ wmf_mismatchedaudiotime.mp4
+ bug1718709_low_res.mp4
+ bug1718709_high_res.mp4
+ whitenoise-he-aac-5s.mp4
+
+[test_AbortAfterPartialMediaSegment.html]
+[test_AppendPartialInitSegment.html]
+[test_AVC3_mp4.html]
+[test_AudioChange_mp4.html]
+[test_AudioChange_mp4_WebAudio.html]
+[test_AutoRevocation.html]
+tags = firstpartyisolation
+[test_BufferedSeek.html]
+[test_BufferedSeek_mp4.html]
+[test_BufferingWait.html]
+[test_BufferingWait_mp4.html]
+[test_ChangeType.html]
+[test_ChangeWhileWaitingOnMissingData_mp4.html]
+[test_DifferentStreamStartTimes.html]
+[test_DrainOnMissingData_mp4.html]
+[test_DurationChange.html]
+[test_DurationUpdated.html]
+[test_DurationUpdated_mp4.html]
+[test_EndedEvent.html]
+[test_EndOfStream.html]
+[test_EndOfStream_mp4.html]
+[test_Eviction_mp4.html]
+[test_ExperimentalAsync.html]
+[test_FrameSelection.html]
+skip-if = toolkit == 'android' # bug 1341519, bug 1401090
+[test_FrameSelection_mp4.html]
+skip-if = os == 'win' # bug 1487973,
+ (os == 'mac') # mac due to bug 1487973
+[test_HaveMetadataUnbufferedSeek.html]
+[test_HaveMetadataUnbufferedSeek_mp4.html]
+[test_HEAAC_extradata.html]
+[test_InputBufferIsCleared.html]
+[test_LiveSeekable.html]
+[test_LoadedDataFired_mp4.html]
+[test_LoadedMetadataFired.html]
+[test_LoadedMetadataFired_mp4.html]
+[test_MediaSource.html]
+[test_MediaSource_capture_gc.html]
+[test_MediaSource_memory_reporting.html]
+[test_MediaSource_mp4.html]
+[test_MediaSource_flac_mp4.html]
+[test_MediaSource_disabled.html]
+[test_MultipleInitSegments.html]
+[test_MultipleInitSegments_mp4.html]
+[test_NoAudioLoopBackData.html]
+[test_NoAudioLoopBackData_Muted.html]
+[test_NoVideoLoopBackData.html]
+[test_OnEvents.html]
+[test_PlayEvents.html]
+[test_PlayEventsAutoPlaying.html]
+[test_PlayEventsAutoPlaying2.html]
+[test_RemoveSourceBuffer.html]
+[test_Resolution_change_should_not_cause_video_freeze.html]
+[test_ResumeAfterClearing_mp4.html]
+[test_SeekableBeforeAndAfterEndOfStream.html]
+[test_SeekableBeforeAndAfterEndOfStream_mp4.html]
+[test_SeekableBeforeAndAfterEndOfStreamSplit.html]
+[test_SeekableBeforeAndAfterEndOfStreamSplit_mp4.html]
+[test_SeekNoData_mp4.html]
+[test_SeekedEvent_mp4.html]
+[test_SeekToEnd_mp4.html]
+[test_SeekToLastFrame_mp4.html]
+[test_SeekTwice_mp4.html]
+[test_Sequence_mp4.html]
+[test_SetModeThrows.html]
+[test_SplitAppendDelay.html]
+[test_SplitAppendDelay_mp4.html]
+[test_SplitAppend.html]
+[test_SplitAppend_mp4.html]
+[test_Threshold_mp4.html]
+[test_TimestampOffset_mp4.html]
+[test_trackidchange_mp4.html]
+[test_TruncatedDuration.html]
+[test_TruncatedDuration_mp4.html]
+[test_WaitingOnMissingData.html]
+[test_WaitingOnMissingData_mp4.html]
+[test_WaitingOnMissingDataEnded_mp4.html]
+[test_WaitingToEndedTransition_mp4.html]
+[test_WebMTagsBeforeCluster.html]
+[test_WMFUnmatchedAudioDataTime.html]
diff --git a/dom/media/mediasource/test/seek.webm b/dom/media/mediasource/test/seek.webm
new file mode 100644
index 0000000000..72b0297233
--- /dev/null
+++ b/dom/media/mediasource/test/seek.webm
Binary files differ
diff --git a/dom/media/mediasource/test/seek.webm^headers^ b/dom/media/mediasource/test/seek.webm^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/seek.webm^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/seek_lowres.webm b/dom/media/mediasource/test/seek_lowres.webm
new file mode 100644
index 0000000000..8a76e06470
--- /dev/null
+++ b/dom/media/mediasource/test/seek_lowres.webm
Binary files differ
diff --git a/dom/media/mediasource/test/seek_lowres.webm^headers^ b/dom/media/mediasource/test/seek_lowres.webm^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/seek_lowres.webm^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/segment-2.0001.m4s b/dom/media/mediasource/test/segment-2.0001.m4s
new file mode 100644
index 0000000000..b63fd6aaa6
--- /dev/null
+++ b/dom/media/mediasource/test/segment-2.0001.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/segment-2.0001.m4s^headers^ b/dom/media/mediasource/test/segment-2.0001.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/segment-2.0001.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/segment-2.0002.m4s b/dom/media/mediasource/test/segment-2.0002.m4s
new file mode 100644
index 0000000000..3a0051f10e
--- /dev/null
+++ b/dom/media/mediasource/test/segment-2.0002.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/segment-2.0002.m4s^headers^ b/dom/media/mediasource/test/segment-2.0002.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/segment-2.0002.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/segment-3.0001.m4s b/dom/media/mediasource/test/segment-3.0001.m4s
new file mode 100644
index 0000000000..71e33f0e8f
--- /dev/null
+++ b/dom/media/mediasource/test/segment-3.0001.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/segment-3.0001.m4s^headers^ b/dom/media/mediasource/test/segment-3.0001.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/segment-3.0001.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/segment-3.0002.m4s b/dom/media/mediasource/test/segment-3.0002.m4s
new file mode 100644
index 0000000000..10a3ce695d
--- /dev/null
+++ b/dom/media/mediasource/test/segment-3.0002.m4s
Binary files differ
diff --git a/dom/media/mediasource/test/segment-3.0002.m4s^headers^ b/dom/media/mediasource/test/segment-3.0002.m4s^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/segment-3.0002.m4s^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/tags_before_cluster.webm b/dom/media/mediasource/test/tags_before_cluster.webm
new file mode 100644
index 0000000000..cf7d596b0e
--- /dev/null
+++ b/dom/media/mediasource/test/tags_before_cluster.webm
Binary files differ
diff --git a/dom/media/mediasource/test/tags_before_cluster.webm^header^ b/dom/media/mediasource/test/tags_before_cluster.webm^header^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/dom/media/mediasource/test/tags_before_cluster.webm^header^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/dom/media/mediasource/test/test_AVC3_mp4.html b/dom/media/mediasource/test/test_AVC3_mp4.html
new file mode 100644
index 0000000000..dd20feed06
--- /dev/null
+++ b/dom/media/mediasource/test/test_AVC3_mp4.html
@@ -0,0 +1,38 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: AVC3 content playback.</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+
+ await fetchAndLoad(videosb, "avc3/init", [""], ".mp4");
+ const p = once(el, "loadeddata");
+ await fetchAndLoad(videosb, "avc3/segment", range(1, 2), ".m4s");
+ await p;
+ is(videosb.buffered.length, 1, "continuous buffered range");
+ ok(true, "got loadeddata");
+ ms.endOfStream();
+ await once(ms, "sourceended");
+ ok(true, "endOfStream completed");
+ // Now ensure that we can play to the end.
+ el.play();
+ await once(el, "ended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_AbortAfterPartialMediaSegment.html b/dom/media/mediasource/test/test_AbortAfterPartialMediaSegment.html
new file mode 100644
index 0000000000..4c695f48c7
--- /dev/null
+++ b/dom/media/mediasource/test/test_AbortAfterPartialMediaSegment.html
@@ -0,0 +1,62 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: can properly resume after a partial media segment header followed by abort </title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+const VIDEO_CODEC_STRING = 'video/webm; codecs="vp09.00.51.08.01.01.01.01"';
+
+const logError = (error) => {
+ console.error(error, error.message);
+ ok(false, "should not reach here");
+};
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+
+ const supported = MediaSource.isTypeSupported(VIDEO_CODEC_STRING);
+ if (!supported) {
+ ok(true, "vp9 isn't supported on this platform, abort");
+ SimpleTest.finish();
+ return;
+ }
+ const sb = ms.addSourceBuffer(VIDEO_CODEC_STRING);
+
+ const arrayBuffer = await fetchWithXHR("1516754.webm");
+ info("- append init segment, a media segment and a partial media segment header -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 87355 + 3000));
+
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+
+ // start seeking.
+ v.currentTime = 11;
+ v.addEventListener("seeked", () => {
+ info("- seek completed -");
+ SimpleTest.finish();
+ });
+
+ sb.abort();
+
+ info("- append init segment -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 3150));
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+ info("- append media segment 10-15s -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 159968, 72931));
+
+ // We now wait for seek to complete
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_AppendPartialInitSegment.html b/dom/media/mediasource/test/test_AppendPartialInitSegment.html
new file mode 100644
index 0000000000..408c073bd6
--- /dev/null
+++ b/dom/media/mediasource/test/test_AppendPartialInitSegment.html
@@ -0,0 +1,43 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: split init segment and append them separately </title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ // init segment is total 236 bytes.
+ info("- append partial init segment -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 100));
+
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+
+ info("- append remaining init segment -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 100, 136));
+
+ info("- wait for metadata -");
+ await once(v, "loadedmetadata");
+ is(v.videoWidth, 320, "videoWidth has correct initial value");
+ is(v.videoHeight, 240, "videoHeight has correct initial value");
+
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_AudioChange_mp4.html b/dom/media/mediasource/test/test_AudioChange_mp4.html
new file mode 100644
index 0000000000..9051af05a1
--- /dev/null
+++ b/dom/media/mediasource/test/test_AudioChange_mp4.html
@@ -0,0 +1,49 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// This test checks loading a stereo segment, followed by a 5.1 segment plays without error.
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ logEvents(el);
+
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ el.addEventListener("error", e => {
+ ok(false, `should not fire ${e.type} event`);
+ SimpleTest.finish();
+ });
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ let p = once(el, "loadedmetadata");
+ await fetchAndLoad(audiosb, "aac20-48000-64000-", ["init"], ".mp4");
+ await p;
+ ok(true, "got loadedmetadata event");
+ p = Promise.all([once(el, "loadeddata"), once(el, "canplay")]);
+ await fetchAndLoad(audiosb, "aac20-48000-64000-", ["1"], ".m4s");
+ await p;
+ ok(true, "got canplay event");
+ el.play();
+ await fetchAndLoad(audiosb, "aac51-48000-128000-", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "aac51-48000-128000-", ["2"], ".m4s");
+ ms.endOfStream();
+ await once(el, "ended");
+ ok(el.currentTime >= 6, "played to the end");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_AudioChange_mp4_WebAudio.html b/dom/media/mediasource/test/test_AudioChange_mp4_WebAudio.html
new file mode 100644
index 0000000000..c76342f793
--- /dev/null
+++ b/dom/media/mediasource/test/test_AudioChange_mp4_WebAudio.html
@@ -0,0 +1,55 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// This test checks loading a stereo segment, followed by a 5.1 segment plays
+// without error, when the audio is being routed to an AudioContext.
+
+const ac = new AudioContext();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ const source = ac.createMediaElementSource(el);
+ source.connect(ac.destination);
+
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ logEvents(el);
+
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ el.addEventListener("error", e => {
+ ok(false, `should not fire ${e.type} event`);
+ SimpleTest.finish();
+ });
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ let p = once(el, "loadedmetadata");
+ await fetchAndLoad(audiosb, "aac20-48000-64000-", ["init"], ".mp4");
+ await p;
+ ok(true, "got loadedmetadata event");
+ p = Promise.all([once(el, "loadeddata"), once(el, "canplay")]);
+ await fetchAndLoad(audiosb, "aac20-48000-64000-", ["1"], ".m4s");
+ await p;
+ ok(true, "got canplay event");
+ el.play();
+ await fetchAndLoad(audiosb, "aac51-48000-128000-", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "aac51-48000-128000-", ["2"], ".m4s");
+ ms.endOfStream();
+ await once(el, "ended");
+ ok(el.currentTime >= 6, "played to the end");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_AutoRevocation.html b/dom/media/mediasource/test/test_AutoRevocation.html
new file mode 100644
index 0000000000..42e9b0e6a5
--- /dev/null
+++ b/dom/media/mediasource/test/test_AutoRevocation.html
@@ -0,0 +1,40 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: auto-revocation</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(function() {
+ const ms = new MediaSource();
+ const o = URL.createObjectURL(ms);
+ const v = document.createElement("video");
+
+ v.addEventListener("error", () => {
+ ok(true, "ObjectURL should be auto-revoked");
+ SimpleTest.finish();
+ });
+
+ v.addEventListener("stalled", () => {
+ ok(false, "If auto-revocation is gone, please turn on TODOs in browser_mediaSourceURL.js");
+ SimpleTest.finish();
+ });
+
+ setTimeout(function() {
+ v.src = o;
+ v.preload = "auto";
+ document.body.appendChild(v);
+ }, 0);
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_BufferedSeek.html b/dom/media/mediasource/test/test_BufferedSeek.html
new file mode 100644
index 0000000000..039f56bc16
--- /dev/null
+++ b/dom/media/mediasource/test/test_BufferedSeek.html
@@ -0,0 +1,44 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seeking in buffered range</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("seek.webm")));
+
+ const target = 2;
+
+ v.addEventListener("loadedmetadata", () => {
+ ok(true, "received loadedmetadata");
+ v.currentTime = target;
+ });
+
+ let wasSeeking = false;
+
+ v.addEventListener("seeking", () => {
+ wasSeeking = true;
+ is(v.currentTime, target, "Video currentTime at target");
+ });
+
+ await once(v, "seeked");
+ ok(wasSeeking, "Received expected seeking and seeked events");
+ is(v.currentTime, target, "Video currentTime at target");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_BufferedSeek_mp4.html b/dom/media/mediasource/test/test_BufferedSeek_mp4.html
new file mode 100644
index 0000000000..e89e972c91
--- /dev/null
+++ b/dom/media/mediasource/test/test_BufferedSeek_mp4.html
@@ -0,0 +1,43 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seeking in buffered range</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("bipbop/bipbop2s.mp4")));
+
+ const target = 1.3;
+
+ await once(v, "loadedmetadata");
+ ok(true, "received loadedmetadata");
+ v.currentTime = target;
+
+ let wasSeeking = false;
+
+ v.addEventListener("seeking", () => {
+ wasSeeking = true;
+ is(v.currentTime, target, "Video currentTime at target");
+ });
+
+ await once(v, "seeked");
+ ok(wasSeeking, "Received expected seeking and seeked events");
+ is(v.currentTime, target, "Video currentTime at target");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_BufferingWait.html b/dom/media/mediasource/test/test_BufferingWait.html
new file mode 100644
index 0000000000..289ddfe4d2
--- /dev/null
+++ b/dom/media/mediasource/test/test_BufferingWait.html
@@ -0,0 +1,52 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: Don't get stuck buffering for too long when we have frames to show</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ ms.addEventListener("sourceopen", () => ok(false, "No more sourceopen"));
+ const sb = ms.addSourceBuffer("video/webm");
+ ok(sb, "Create a SourceBuffer");
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ sb.addEventListener("error", e => {
+ ok(false, "Got Error: " + e);
+ SimpleTest.finish();
+ });
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 0, 318));
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 318, 25523 - 318));
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 25523, 46712 - 25523));
+ /* Note - Missing |46712, 67833 - 46712| segment here corresponding to (0.8, 1.2] */
+ /* Note - Missing |67833, 88966 - 67833| segment here corresponding to (1.2, 1.6] */
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 88966));
+ // 0.767 is the time of the last video sample +- 40ms.
+ info("Playing video. It should play for a bit, then fire 'waiting'");
+ v.play();
+ await waitUntilTime(v, .767 - 0.04);
+ const firstStop = Date.now();
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 46712, 67833 - 46712));
+ await waitUntilTime(v, 1.167 - 0.04);
+ const waitDuration = (Date.now() - firstStop) / 1000;
+ ok(waitDuration < 15, `Should not spend inordinate amount of time buffering: ${waitDuration}`);
+ SimpleTest.finish();
+ /* If we allow the rest of the stream to be played, we get stuck at
+ around 2s. See bug 1093133.
+ await once(v, "ended");
+ SimpleTest.finish();
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 67833, 88966 - 67833));
+ */
+});
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_BufferingWait_mp4.html b/dom/media/mediasource/test/test_BufferingWait_mp4.html
new file mode 100644
index 0000000000..04e094a852
--- /dev/null
+++ b/dom/media/mediasource/test/test_BufferingWait_mp4.html
@@ -0,0 +1,49 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: Don't get stuck buffering for too long when we have frames to show</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ ms.addEventListener("sourceopen", () => ok(false, "No more sourceopen"));
+ const sb = ms.addSourceBuffer("video/mp4");
+ ok(sb, "Create a SourceBuffer");
+
+ sb.addEventListener("error", e => {
+ ok(false, "Got Error: " + e);
+ SimpleTest.finish();
+ });
+ await fetchAndLoad(sb, "bipbop/bipbop", ["init"], ".mp4");
+ await fetchAndLoad(sb, "bipbop/bipbop", ["1"], ".m4s");
+ await fetchAndLoad(sb, "bipbop/bipbop", ["2"], ".m4s");
+ /* Note - Missing |bipbop3| segment here corresponding to (1.62, 2.41] */
+ /* Note - Missing |bipbop4| segment here corresponding to (2.41, 3.20] */
+ await fetchAndLoad(sb, "bipbop/bipbop", ["5"], ".m4s");
+ // last audio sample has a start time of 1.578956s
+ info("Playing video. It should play for a bit, then fire 'waiting'");
+ v.play();
+ await waitUntilTime(v, 1.57895);
+ const firstStop = Date.now();
+ await fetchAndLoad(sb, "bipbop/bipbop", ["3"], ".m4s");
+ // last audio sample has a start time of 2.368435
+ await waitUntilTime(v, 2.36843);
+ const waitDuration = (Date.now() - firstStop) / 1000;
+ ok(waitDuration < 15, `Should not spend inordinate amount of time buffering: ${waitDuration}`);
+ await fetchAndLoad(sb, "bipbop/bipbop", ["4"], ".m4s");
+ ms.endOfStream();
+ await once(v, "ended");
+ SimpleTest.finish();
+});
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_ChangeType.html b/dom/media/mediasource/test/test_ChangeType.html
new file mode 100644
index 0000000000..690b9f61c4
--- /dev/null
+++ b/dom/media/mediasource/test/test_ChangeType.html
@@ -0,0 +1,84 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: changeType allow to change container and codec type</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(function(ms, el) {
+ el.controls = true;
+ once(ms, "sourceopen").then(function() {
+ // Log events for debugging.
+ const events = ["suspend", "play", "canplay", "canplaythrough", "loadstart", "loadedmetadata",
+ "loadeddata", "playing", "ended", "error", "stalled", "emptied", "abort",
+ "waiting", "pause", "durationchange", "seeking", "seeked"];
+ function logEvent(e) {
+ info("got " + e.type + " event");
+ }
+ events.forEach(function(e) {
+ el.addEventListener(e, logEvent);
+ });
+
+ ok(true, "Receive a sourceopen event");
+
+ const videosb = ms.addSourceBuffer("video/mp4");
+ if (typeof videosb.changeType === "undefined") {
+ info("changeType API is not available");
+ }
+
+ el.addEventListener("error", e => {
+ ok(false, "should not fire '" + e.type + "' event");
+ SimpleTest.finish();
+ });
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ const loadedmetadataPromises = [];
+ loadedmetadataPromises.push(fetchAndLoad(videosb, "bipbop/bipbop", ["init"], ".mp4"));
+ loadedmetadataPromises.push(once(el, "loadedmetadata"));
+ Promise.all(loadedmetadataPromises)
+ .then(function() {
+ ok(true, "got loadedmetadata event");
+ const canplayPromises = [];
+ canplayPromises.push(once(el, "loadeddata"));
+ canplayPromises.push(once(el, "canplay"));
+ canplayPromises.push(fetchAndLoad(videosb, "bipbop/bipbop", range(1, 3), ".m4s"));
+ return Promise.all(canplayPromises);
+ })
+ .then(function() {
+ ok(true, "got canplay event");
+ el.play();
+ videosb.timestampOffset = el.buffered.end(0);
+ return fetchAndLoad(videosb, "bipbop/bipbop_480_624kbps-video", ["init"], ".mp4");
+ })
+ .then(fetchAndLoad.bind(null, videosb, "bipbop/bipbop_480_624kbps-video", range(1, 3), ".m4s"))
+ .then(function() {
+ videosb.timestampOffset = el.buffered.end(0);
+ try {
+ videosb.changeType("video/webm");
+ } catch (e) {
+ ok(false, "shouldn't throw an exception");
+ SimpleTest.finish();
+ throw e;
+ }
+ return fetchAndLoad(videosb, "bipbop/bipbop_300-3s", [""], ".webm");
+ })
+ .then(function() {
+ ms.endOfStream();
+ return once(el, "ended");
+ })
+ .then(function() {
+ ok(el.currentTime >= el.buffered.end(0), "played to the end");
+ SimpleTest.finish();
+ });
+ });
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_ChangeWhileWaitingOnMissingData_mp4.html b/dom/media/mediasource/test/test_ChangeWhileWaitingOnMissingData_mp4.html
new file mode 100644
index 0000000000..b5889da560
--- /dev/null
+++ b/dom/media/mediasource/test/test_ChangeWhileWaitingOnMissingData_mp4.html
@@ -0,0 +1,37 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: resume from waiting even after format change occurred</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const sb = ms.addSourceBuffer("video/mp4");
+ await fetchAndLoad(sb, "bipbop/bipbop_480_624kbps-video", ["init"], ".mp4");
+ await fetchAndLoad(sb, "bipbop/bipbop_480_624kbps-video", range(1, 3), ".m4s");
+ el.play();
+  // Let's seek to the last audio frame.
+ // The seek will complete and then playback will stall.
+ el.currentTime = 1.532517;
+ await Promise.all([once(el, "seeked"), once(el, "waiting")]);
+ info("seek completed");
+ await fetchAndLoad(sb, "bipbop/bipbop", ["init"], ".mp4");
+ await fetchAndLoad(sb, "bipbop/bipbop", range(1, 4), ".m4s");
+ ms.endOfStream();
+ await once(el, "ended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_DifferentStreamStartTimes.html b/dom/media/mediasource/test/test_DifferentStreamStartTimes.html
new file mode 100644
index 0000000000..197e809e4f
--- /dev/null
+++ b/dom/media/mediasource/test/test_DifferentStreamStartTimes.html
@@ -0,0 +1,54 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: Mismatched stream start time playback test</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ el.autoplay = true;
+ for (let i = 0; i <= 0.5; i += 0.1) {
+ const offset = i.toFixed(1);
+
+ info("----------------------");
+ info("Running test for mismatched stream start times with offset of: " + offset);
+ info("----------------------");
+
+ ms = new MediaSource();
+ el.removeAttribute("src");
+ el.src = URL.createObjectURL(ms);
+
+ await once(ms, "sourceopen");
+ logEvents(el);
+ const videosb = ms.addSourceBuffer("video/mp4");
+ ok(true, "Receive a sourceopen event");
+
+ el.addEventListener("error", e => {
+ ok(false, `should not fire ${e.type} event`);
+ SimpleTest.finish();
+ });
+
+ let p = once(el, "loadedmetadata");
+ await fetchAndLoad(videosb, "bipbop/bipbop_offset_" + offset + "-", ["init"], ".mp4");
+ await p;
+ ok(true, "got loadedmetadata event");
+
+ p = Promise.all(["loadeddata", "canplay", "play", "playing"].map(e => once(el, e)));
+ await fetchAndLoad(videosb, "bipbop/bipbop_offset_" + offset + "-", range(1, 2), ".m4s");
+ el.play();
+ await p;
+ }
+ ok(true, "got all required event");
+ SimpleTest.finish();
+});
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_DrainOnMissingData_mp4.html b/dom/media/mediasource/test/test_DrainOnMissingData_mp4.html
new file mode 100644
index 0000000000..ddc503aebf
--- /dev/null
+++ b/dom/media/mediasource/test/test_DrainOnMissingData_mp4.html
@@ -0,0 +1,49 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: |waiting| event when source data is missing</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ // Set appendWindowEnd to ensure we only have about 6 frames worth.
+ // We must feed at least 6 frames to pass the MDSM pre-roll.
+ videosb.appendWindowEnd = .4;
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["1"], ".m4s");
+ info("Invoking play()");
+ const p = once(el, "playing");
+ await el.play();
+ await p;
+ info("got playing");
+ await once(el, "waiting");
+ info("got waiting");
+ info("Loading more data");
+ // Waiting will be fired on the last frame +- 40ms.
+ isfuzzy(el.currentTime, videosb.buffered.end(0) - 1 / 30,
+ 0.04, `Got a waiting event at ${el.currentTime}`);
+ videosb.appendWindowEnd = 1;
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", [1], ".m4s");
+ ms.endOfStream();
+ await once(el, "ended");
+ // These fuzz factors are bigger than they should be. We should investigate
+ // and fix them in bug 1137574.
+ is(el.duration, 0.801666, "Video has correct duration: " + el.duration);
+ is(el.currentTime, el.duration, "Video has correct currentTime.");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_DurationChange.html b/dom/media/mediasource/test/test_DurationChange.html
new file mode 100644
index 0000000000..3c83e83fa4
--- /dev/null
+++ b/dom/media/mediasource/test/test_DurationChange.html
@@ -0,0 +1,71 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: check that duration change behaves properly</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 318));
+ await Promise.all([once(v, "loadedmetadata"), once(sb, "updateend")]);
+ is(v.duration, ms.duration, "video duration is mediasource one");
+ must_not_throw(() => ms.duration = 0, "duration = 0 is valid initially");
+ is(v.duration, 0, "reducing duration with no data buffered is valid");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 318));
+ // Adding more data will fire durationchange.
+ await once(sb, "updateend");
+ ok(true, "got updateend");
+ // XXX: Duration should be exactly 4.0, see bug 1065207.
+ ok(Math.abs(v.duration - 4) <= 0.002, "Video has correct duration");
+ must_throw(() => ms.duration = 0,
+ "Must use remove for range removal",
+ "InvalidStateError");
+ ok(Math.abs(v.duration - 4) <= 0.002, "Video has correct duration");
+ must_not_throw(() => ms.duration = 10, "setting duration past data is valid");
+ is(v.duration, 10, "extending duration is always valid");
+  // The last sample has a start time of 3.967000s and an end time of 4.001 (see bug 1065207).
+ must_not_throw(() => ms.duration = 3.967000,
+ "setting duration with >= highest frame presentation time is valid");
+ is(v.duration, sb.buffered.end(0),
+ "duration is the highest end time reported by the buffered attribute ");
+ must_not_throw(() => ms.duration = 3.97,
+ "setting duration with >= highest frame presentation time is valid");
+ is(v.duration, sb.buffered.end(0),
+ "duration is the highest end time reported by the buffered attribute ");
+ must_throw(() => ms.duration = 3.96,
+ "setting duration with < highest frame presentation time is not valid",
+ "InvalidStateError");
+ is(v.duration, sb.buffered.end(0),
+ "duration is the highest end time reported by the buffered attribute ");
+ must_throw(() => ms.duration = -1, "can't set a negative duration", "TypeError");
+ sb.remove(sb.buffered.end(0), Infinity);
+ is(sb.updating, true, "updating is true");
+ must_throw(() => ms.duration = Infinity,
+ "setting the duration while updating is not allowed",
+ "InvalidStateError");
+ must_throw(() => sb.abort(),
+ "Can't use abort while range removal is in progress",
+ "InvalidStateError");
+ is(v.duration, sb.buffered.end(0),
+ "duration is the highest end time reported by the buffered attribute ");
+ await once(sb, "updateend");
+ ms.endOfStream();
+ await once(ms, "sourceended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_DurationUpdated.html b/dom/media/mediasource/test/test_DurationUpdated.html
new file mode 100644
index 0000000000..eb54e76c90
--- /dev/null
+++ b/dom/media/mediasource/test/test_DurationUpdated.html
@@ -0,0 +1,48 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: append data and check that mediasource duration got updated</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ let durationChangeCount = 0;
+ v.addEventListener("durationchange", () => durationChangeCount++);
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 318));
+
+ // Adding the first init segment will fire a durationchange.
+ await Promise.all([once(sb, "updateend"), once(v, "loadedmetadata")]);
+ ok(true, "got loadedmetadata");
+ // Set mediasource duration to 0, so future appendBuffer
+ // will update the mediasource duration.
+ // Changing the duration will fire a durationchange.
+ ms.duration = 0;
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 318));
+ // Adding more data will fire durationchange.
+ await once(sb, "updateend");
+ ok(true, "got updateend");
+ // this will not fire durationchange as new duration == old duration
+ ms.endOfStream();
+ await once(ms, "sourceended");
+ is(durationChangeCount, 3, "durationchange not fired as many times as expected");
+ // XXX: Duration should be exactly 4.0, see bug 1065207.
+ ok(Math.abs(v.duration - 4) <= 0.002, "Video has correct duration");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_DurationUpdated_mp4.html b/dom/media/mediasource/test/test_DurationUpdated_mp4.html
new file mode 100644
index 0000000000..f263264b09
--- /dev/null
+++ b/dom/media/mediasource/test/test_DurationUpdated_mp4.html
@@ -0,0 +1,47 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: append data and check that mediasource duration got updated</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ let durationChangeCount = 0;
+ v.addEventListener("durationchange", () => durationChangeCount++);
+
+ const arrayBuffer = await fetchWithXHR("bipbop/bipbop2s.mp4");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 1395));
+
+ // Adding the first init segment will fire a durationchange.
+ await Promise.all([once(sb, "updateend"), once(v, "loadedmetadata")]);
+ ok(true, "got loadedmetadata");
+ // Set mediasource duration to 0, so future appendBuffer
+ // will update the mediasource duration.
+ // Changing the duration will fire a durationchange.
+ ms.duration = 0;
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 1395));
+ // Adding more data will fire durationchange.
+ await once(sb, "updateend");
+ ok(true, "got updateend");
+ // this will not fire durationchange as new duration == old duration
+ ms.endOfStream();
+ await once(ms, "sourceended");
+ is(durationChangeCount, 3, "durationchange not fired as many times as expected");
+ is(v.duration, 1.696666, "Video has correct duration");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_EndOfStream.html b/dom/media/mediasource/test/test_EndOfStream.html
new file mode 100644
index 0000000000..b926869f1f
--- /dev/null
+++ b/dom/media/mediasource/test/test_EndOfStream.html
@@ -0,0 +1,29 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: endOfStream call after an appendBuffer</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("seek.webm"), 0, 88966));
+ await once(sb, "updateend");
+ await wait(0);
+ must_not_throw(() => ms.endOfStream(), "MediaSource.endOfStream succeeded");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_EndOfStream_mp4.html b/dom/media/mediasource/test/test_EndOfStream_mp4.html
new file mode 100644
index 0000000000..9319b80390
--- /dev/null
+++ b/dom/media/mediasource/test/test_EndOfStream_mp4.html
@@ -0,0 +1,29 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: endOfStream call after an appendBuffer</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("bipbop/bipbop2s.mp4")));
+ await once(sb, "updateend");
+ await wait(0);
+ must_not_throw(() => ms.endOfStream(), "MediaSource.endOfStream succeeded");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_EndedEvent.html b/dom/media/mediasource/test/test_EndedEvent.html
new file mode 100644
index 0000000000..ee43fa8cf5
--- /dev/null
+++ b/dom/media/mediasource/test/test_EndedEvent.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("seek.webm")));
+ sb.addEventListener("updateend", () => ms.endOfStream());
+
+ // Test "ended" is fired when seeking to the end of the media
+ // once the duration is known.
+ ms.onsourceended = () => el.currentTime = el.duration;
+ await once(el, "ended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_Eviction_mp4.html b/dom/media/mediasource/test/test_Eviction_mp4.html
new file mode 100644
index 0000000000..e336fae4c7
--- /dev/null
+++ b/dom/media/mediasource/test/test_Eviction_mp4.html
@@ -0,0 +1,63 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: QuotaExceededError when source buffer is full</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+// We fill up the source buffer with audio data until the buffer is full.
+// We ensure that QuotaExceededError is thrown once the buffer is full.
+// We then seek to half the content. By that time, another appendBuffer must succeed
+// as the auto-eviction would succeed (removing all data prior to currentTime)
+
+addMSEPrefs(
+ ["media.mediasource.eviction_threshold.audio", 524288],
+ ["media.dormant-on-pause-timeout-ms", -1] // FIXME: bug 1319292
+);
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ audiosb.mode = "sequence";
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ const audioBuffer = await fetchWithXHR("bipbop/bipbop_audio1.m4s");
+
+ await must_reject(async () => {
+ // We are appending data repeatedly in sequence mode, there should be no gaps.
+ while (true) {
+ ok(audiosb.buffered.length <= 1, "there should be no gap in buffered ranges.");
+ audiosb.appendBuffer(audioBuffer);
+ await once(audiosb, "updateend");
+ }
+ },
+ "Fill up SourceBuffer by appending data until an exception is thrown.",
+ "QuotaExceededError");
+
+ is(audiosb.buffered.end(0), el.duration, "Duration is end of buffered range");
+ const seekTime = audiosb.buffered.end(0) / 2;
+ el.currentTime = seekTime;
+ await once(el, "seeked");
+ dump("dump: seeked to " + seekTime);
+ is(el.currentTime, seekTime, "correctly seeked to " + seekTime);
+ try {
+ audiosb.appendBuffer(audioBuffer);
+ await once(audiosb, "update");
+ ok(true, "appendBuffer succeeded");
+ } catch (ex) {
+ ok(false, "Shouldn't throw another time when data can be evicted");
+ dump(JSON.stringify(await SpecialPowers.wrap(el).mozRequestDebugInfo()));
+ }
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_ExperimentalAsync.html b/dom/media/mediasource/test/test_ExperimentalAsync.html
new file mode 100644
index 0000000000..6617716f26
--- /dev/null
+++ b/dom/media/mediasource/test/test_ExperimentalAsync.html
@@ -0,0 +1,102 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: testing removeAsync and appendBufferAsync</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+addMSEPrefs(
+ ["media.mediasource.eviction_threshold.audio", 524288],
+ ["media.dormant-on-pause-timeout-ms", -1], // FIXME: bug 1319292
+ ["media.mediasource.experimental.enabled", true]
+);
+
+// We fill up the source buffer with audio data until the buffer is full.
+// We ensure that QuotaExceededError is thrown once the buffer is full.
+// We then seek to half the content. By that time, another appendBuffer must succeed
+// as the auto-eviction would succeed (removing all data prior to currentTime)
+// The test then fills the audio buffer and plays until the end.
+
+// Fill up the SourceBuffer by appending data repeatedly via doAppendDataFunc until
+// an exception is thrown.
+async function fillUpSourceBuffer(sourceBuffer, doAppendDataFunc, onCaughtExceptionCallback) {
+ try {
+ // We are appending data repeatedly in sequence mode, there should be no gaps.
+ while (true) {
+ ok(sourceBuffer.buffered.length <= 1, "there should be no gap in buffered ranges.");
+ await doAppendDataFunc();
+ }
+ } catch (ex) {
+ ok(true, "appendBuffer promise got rejected");
+ onCaughtExceptionCallback(ex);
+ }
+}
+
+runWithMSE(async function(ms, el) {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+
+ // Test removeAsync
+ audiosb.mode = "sequence";
+ const audioInitBuffer = await fetchWithXHR("bipbop/bipbop_audioinit.mp4");
+ await audiosb.appendBufferAsync(audioInitBuffer);
+ const audioBuffer = await fetchWithXHR("bipbop/bipbop_audio1.m4s");
+ fillUpSourceBuffer(audiosb,
+ function() { // doAppendDataFunc
+ return audiosb.appendBufferAsync(audioBuffer);
+ },
+ async function(ex1) { // onCaughtExceptionCallback
+ is(ex1.name, "QuotaExceededError", "QuotaExceededError thrown");
+ is(audiosb.buffered.end(0), el.duration, "Duration is end of buffered range");
+ const seekTime = audiosb.buffered.end(0) / 2;
+ el.currentTime = seekTime;
+ await once(el, "seeked");
+ dump("dump: seeked to " + seekTime);
+ is(el.currentTime, seekTime, "correctly seeked to " + seekTime);
+ await audiosb.appendBufferAsync(audioBuffer).catch(async function(ex2) {
+ ok(false, "Shouldn't throw another time when data can be evicted");
+ dump(JSON.stringify(await SpecialPowers.wrap(el).mozRequestDebugInfo()));
+ SimpleTest.finish();
+ });
+      // Test that an error in remove returns a rejected promise
+ await audiosb.removeAsync(5, 0).catch(async function(ex3) {
+ ok(true, "remove promise got rejected with end <= start");
+ is(ex3.name, "TypeError");
+ await audiosb.removeAsync(ms.duration + 1, Infinity).catch(async function(ex4) {
+ ok(true, "remove promise got rejected with start > duration");
+ is(ex4.name, "TypeError");
+ await audiosb.removeAsync(0, Infinity).catch(function(ex5) {
+ ok(false, "shouldn't throw");
+ });
+ ok(true, "remove succeeded");
+ is(audiosb.buffered.length, 0, "buffered should be empty");
+ audiosb.mode = "segment";
+ audiosb.timestampOffset = 0;
+ el.currentTime = 0;
+ await fetchAndLoadAsync(audiosb, "bipbop/bipbop_audio", range(1, 4), ".m4s");
+ ms.endOfStream();
+ el.play();
+ await once(el, "ended");
+ is(el.currentTime, el.duration, "played to the end");
+ SimpleTest.finish();
+ throw ex4; // ensure we don't fallback on lines below.
+ });
+ ok(false, "should have returned an error");
+ });
+ ok(false, "should have returned an error");
+ }
+ );
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_FrameSelection.html b/dom/media/mediasource/test/test_FrameSelection.html
new file mode 100644
index 0000000000..3e696841c2
--- /dev/null
+++ b/dom/media/mediasource/test/test_FrameSelection.html
@@ -0,0 +1,64 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: verify correct frames selected for given position</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ let arrayBuffer = await fetchWithXHR("seek.webm");
+ let p = once(v, "loadedmetadata");
+ // Append entire file covering range [0, 4].
+ sb.appendBuffer(new Uint8Array(arrayBuffer));
+ await p;
+ is(v.currentTime, 0, "currentTime has correct initial value");
+ is(v.videoWidth, 320, "videoWidth has correct initial value");
+ is(v.videoHeight, 240, "videoHeight has correct initial value");
+
+ arrayBuffer = await fetchWithXHR("seek_lowres.webm");
+ // Append initialization segment.
+ info("Appending low-res init segment");
+ p = once(sb, "updateend");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 438));
+ await p;
+
+ info("Appending low-res range [2,4]");
+ // Append media segment covering range [2, 4].
+ p = once(sb, "updateend");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 51003));
+ await p;
+
+ ms.endOfStream();
+
+ info("Seeking to t=3");
+ p = Promise.all([once(v, "seeked"), once(v, "resize")]);
+ v.currentTime = 3;
+ await p;
+ is(v.currentTime, 3, "Video currentTime at target");
+ is(v.videoWidth, 160, "videoWidth has correct low-res value");
+ is(v.videoHeight, 120, "videoHeight has correct low-res value");
+
+ info("Seeking to t=1");
+ p = Promise.all([once(v, "seeked"), once(v, "resize")]);
+ v.currentTime = 1;
+ await p;
+ is(v.currentTime, 1, "Video currentTime at target");
+ is(v.videoWidth, 320, "videoWidth has correct high-res value");
+ is(v.videoHeight, 240, "videoHeight has correct high-res value");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_FrameSelection_mp4.html b/dom/media/mediasource/test/test_FrameSelection_mp4.html
new file mode 100644
index 0000000000..628b4bf0e9
--- /dev/null
+++ b/dom/media/mediasource/test/test_FrameSelection_mp4.html
@@ -0,0 +1,49 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: Don't get stuck buffering for too long when we have frames to show</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// This test loads partial video, plays and waits until playback stalls.
+// It then loads only 3 frames of a video at higher resolution.
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ ms.addEventListener("sourceopen", () => ok(false, "No more sourceopen"));
+ const sb = ms.addSourceBuffer("video/mp4");
+ ok(sb, "Create a SourceBuffer");
+ logEvents(v);
+ sb.addEventListener("error", e => {
+ ok(false, `should not fire ${e.type} event`);
+ SimpleTest.finish();
+ });
+ await fetchAndLoad(sb, "bipbop/bipbop", ["init"], ".mp4");
+ const p = once(v, "loadeddata");
+ await fetchAndLoad(sb, "bipbop/bipbop", range(1, 3), ".m4s");
+ await p;
+ is(sb.buffered.length, 1, "continuous range");
+ v.play();
+  // Once the buffered data is exhausted, a "waiting" event will be fired.
+ await waitUntilTime(v, 1.5);
+ await fetchAndLoad(sb, "bipbop/bipbop_480_624kbps-video", ["init"], ".mp4");
+ sb.timestampOffset = 1.601666; // End of the video track buffered - time of first video sample (0.095).
+ sb.appendWindowEnd = 1.796677; // Only allow room for three extra video frames (we need 3 as this video has b-frames).
+ await fetchAndLoad(sb, "bipbop/bipbop_480_624kbps-video", ["1"], ".m4s");
+ ms.endOfStream();
+ await Promise.all([once(ms, "sourceended"), once(v, "playing"), once(v, "ended")]);
+ is(v.videoWidth, 640, "has proper width");
+ is(v.videoHeight, 480, "has proper height");
+ SimpleTest.finish();
+});
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_HEAAC_extradata.html b/dom/media/mediasource/test/test_HEAAC_extradata.html
new file mode 100644
index 0000000000..9fbbec8d72
--- /dev/null
+++ b/dom/media/mediasource/test/test_HEAAC_extradata.html
@@ -0,0 +1,89 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>HE-AAC decoding test</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+
+SimpleTest.waitForExplicitFinish();
+
+const SOURCE_FILE = "whitenoise-he-aac-5s.mp4";
+
+// This test checks when decoding HE-AAC using MediaSource or HTTP playback, the
+// audio is decoded correctly (in particular with the SBR part). This means
+// that the extradata describing the encoded AAC stream have been communicated
+// correctly to the audio decoder. For this, we check that there is energy
+// above 10kHz using the Web Audio API when playing white noise, which has
+// maximum energy across the audible spectrum.
+
+// Return the index corresponding for a particular frequency in an array
+// containing frequency data from a FFT.
+function binIndexForFrequency(frequency, fftSize, sampleRate) {
+ return (1 + Math.round((frequency * fftSize) / sampleRate));
+}
+
+async function checkHighFrequencyContent(element) {
+ const ac = new AudioContext();
+ await ac.resume();
+ const mediaElementSource = ac.createMediaElementSource(element);
+ const analyser = new AnalyserNode(ac);
+
+ // Undo the volume scaling applied globally during test. This is fine because
+ // the audio isn't routed to an actual audio output device in this test, it's
+ // just analyzed with the Web Audio API.
+ const gain = new GainNode(ac);
+ const testVolumeScaling =
+ parseFloat(SpecialPowers.getCharPref("media.volume_scale"));
+ gain.gain.value = 1 / parseFloat(testVolumeScaling);
+ mediaElementSource.connect(gain).connect(analyser)
+
+ const spectrum = new Float32Array(analyser.frequencyBinCount);
+ const indexFor15kHz =
+ binIndexForFrequency(15000, analyser.fftSize, ac.sampleRate);
+  // Wait a few hundred milliseconds.
+ while (!element.ended) {
+ await once(element, "timeupdate");
+ analyser.getFloatFrequencyData(spectrum);
+ if (spectrum[indexFor15kHz] > -50) {
+ ok(spectrum[indexFor15kHz] > -50,
+ `Energy present at 15kHz (bin index: ${indexFor15kHz}) when playing white noise encoded in HE-AAC ${spectrum[indexFor15kHz]}`);
+ return;
+ }
+ }
+ ok(false,
+ `No energy present at 15kHz (bin index: ${indexFor15kHz}) when playing white noise encoded in HE-AAC (last value ${spectrum[indexFor15kHz]})`);
+}
+
+runWithMSE(async (ms, el) => {
+ // First check with MSE playback
+ el.controls = true;
+ await once(ms, "sourceopen");
+
+ const audiosb = ms.addSourceBuffer('audio/mp4; codecs="mp4a.40.5"');
+ await fetchAndLoad(audiosb, SOURCE_FILE, [""], "");
+ ms.endOfStream();
+ el.play();
+ once(el, "playing");
+
+ await checkHighFrequencyContent(el);
+
+ // Redo the same test, with HTTP playback
+ el.src = SOURCE_FILE;
+ el.play();
+ once(el, "playing");
+
+ await checkHighFrequencyContent(el);
+
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_HaveMetadataUnbufferedSeek.html b/dom/media/mediasource/test/test_HaveMetadataUnbufferedSeek.html
new file mode 100644
index 0000000000..dd1b252f01
--- /dev/null
+++ b/dom/media/mediasource/test/test_HaveMetadataUnbufferedSeek.html
@@ -0,0 +1,38 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seekable attribute before end of stream</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 67833));
+
+ const target = 2;
+
+ await once(v, "loadeddata");
+ ok(v.readyState >= v.HAVE_CURRENT_DATA, "readyState is >= CURRENT_DATA");
+ v.currentTime = target;
+
+ await once(v, "seeking");
+ is(v.readyState, v.HAVE_METADATA, "readyState is HAVE_METADATA");
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("seek.webm"), 67833));
+ await once(v, "seeked");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_HaveMetadataUnbufferedSeek_mp4.html b/dom/media/mediasource/test/test_HaveMetadataUnbufferedSeek_mp4.html
new file mode 100644
index 0000000000..9b8e885cda
--- /dev/null
+++ b/dom/media/mediasource/test/test_HaveMetadataUnbufferedSeek_mp4.html
@@ -0,0 +1,42 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seekable attribute before end of stream</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ const arrayBuffer = await fetchWithXHR("bipbop/bipbop2s.mp4");
+ // 25819 is the offset of the first media segment's end
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 25819));
+
+ const target = 1.3;
+
+ await once(v, "loadeddata");
+ ok(v.readyState >= v.HAVE_CURRENT_DATA, "readyState is >= CURRENT_DATA");
+ v.currentTime = target;
+
+ await once(v, "seeking");
+ is(v.readyState, v.HAVE_METADATA);
+ // 25819 is the offset of the first media segment's end
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 25819));
+ await once(sb, "updateend");
+ ms.endOfStream();
+ await once(v, "seeked");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_InputBufferIsCleared.html b/dom/media/mediasource/test/test_InputBufferIsCleared.html
new file mode 100644
index 0000000000..bad9a0c558
--- /dev/null
+++ b/dom/media/mediasource/test/test_InputBufferIsCleared.html
@@ -0,0 +1,58 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: input buffer is cleared as expected (bug 1697476)</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// Test bug 1697476 is fixed. We do this by appending a number of segments with
+// trailing `skip` boxes. If the bug is fixed, then the data from these appends
+// will eventually be cleared from memory. If not fixed, we leak that memory.
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+ await fetchAndLoad(sb, "bipbop/bipbop_video", ["init"], ".mp4");
+ // Load ~1mb of media.
+ await fetchAndLoad(sb, "bipbop/bipbop_trailing_skip_box_video", ["1"], ".m4s");
+ // Load ~1mb more media several more times.
+ const numberOfAppends = 5;
+ for (let i = 1; i < numberOfAppends; ++i) {
+ sb.timestampOffset = v.buffered.end(0);
+ await fetchAndLoad(sb, "bipbop/bipbop_trailing_skip_box_video", ["1"], ".m4s");
+ }
+
+ // Grab a memory report. We'll use this to make sure we're not accumulating
+ // too much data in our buffers.
+ const mgr = SpecialPowers.Cc["@mozilla.org/memory-reporter-manager;1"]
+ .getService(SpecialPowers.Ci.nsIMemoryReporterManager);
+
+ let amount = 0;
+ const handleReport = (aProcess, aPath, aKind, aUnits, aAmount) => {
+ if (aPath == "explicit/media/resources") {
+ amount += aAmount;
+ }
+ };
+
+ await new Promise(r => mgr.getReports(handleReport, null, r, null, /* anonymized = */ false));
+ ok(true, "Yay didn't crash!");
+ ok(amount !== undefined, "Got media resources amount");
+  const segmentSize = 1023860;
+  // Set the limit to be equal to the total data we appended. If we're not
+  // clearing buffers, we'll have all the data from the appends + some other
+  // data, so will fail.
+  const limit = segmentSize * numberOfAppends - 1;
+ ok(amount < limit, `Should have less than ${limit} bytes of media usage. Got ${amount} bytes.`);
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_LiveSeekable.html b/dom/media/mediasource/test/test_LiveSeekable.html
new file mode 100644
index 0000000000..f48852f6af
--- /dev/null
+++ b/dom/media/mediasource/test/test_LiveSeekable.html
@@ -0,0 +1,84 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: live seekable range</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ // Load data with a +2 offset so that we can distinguish buffered range start
+ // and seekable range start.
+ sb.timestampOffset = 2;
+ const p = once(v, "loadedmetadata");
+ await fetchAndLoad(sb, "seek", [""], ".webm");
+ await p;
+ ms.duration = Infinity;
+ sb.abort();
+ is(sb.buffered.length, 1, "continuous buffered range");
+ is(sb.buffered.start(0), 2, "buffered range start at timestamp offset");
+ is(sb.buffered.end(0), 6.001, "buffered range end at original duration + timestamp offset");
+ is(v.seekable.length, 1, "continuous seekable range");
+ is(v.seekable.start(0), 0, "seekable range start at 0");
+ is(v.seekable.end(0), sb.buffered.end(0), "seekable range end at buffered end");
+
+ // LiveSeekableRange.start < buffered.start
+ ms.setLiveSeekableRange(1, 5);
+ is(v.seekable.length, 1, "continuous seekable range");
+ is(v.seekable.start(0), 1, "seekable range start at live range start");
+ is(v.seekable.end(0), sb.buffered.end(0), "seekable range end at buffered end");
+
+ ms.clearLiveSeekableRange();
+ is(v.seekable.length, 1, "continuous seekable range");
+ is(v.seekable.start(0), 0, "seekable range start at 0");
+ is(v.seekable.end(0), sb.buffered.end(0), "seekable range end at buffered end");
+
+ // LiveSeekableRange.end > buffered.end
+ ms.setLiveSeekableRange(1, 8);
+ is(v.seekable.start(0), 1, "seekable range start at live range start");
+ is(v.seekable.end(0), 8, "seekable range end at live range end");
+
+ // LiveSeekableRange.start > buffered.start
+ // LiveSeekableRange.end < buffered.end
+ ms.setLiveSeekableRange(3, 5);
+ is(v.seekable.start(0), sb.buffered.start(0), "seekable range start at buffered start");
+ is(v.seekable.end(0), sb.buffered.end(0), "seekable range end at live range end");
+
+ // LiveSeekableRange.start > buffered.end
+ ms.setLiveSeekableRange(8, 10);
+ is(v.seekable.start(0), sb.buffered.start(0), "seekable range start at buffered start");
+ is(v.seekable.end(0), 10, "seekable range end at live range end");
+
+ // LiveSeekableRange.end < buffered.start
+ ms.setLiveSeekableRange(0, 2);
+ is(v.seekable.start(0), 0, "seekable range start at live range start");
+ is(v.seekable.end(0), sb.buffered.end(0), "seekable range end at buffered end");
+
+ must_throw(() => ms.setLiveSeekableRange(2, 0),
+             "must throw if start > end",
+ "TypeError");
+
+ must_throw(() => ms.setLiveSeekableRange(2, 0),
+             "must throw if start > end",
+ "TypeError");
+
+ ms.setLiveSeekableRange(0, 1e300);
+ is(v.seekable.start(0), 0, "seekable range start at live range start");
+ is(v.seekable.end(0), 1e300, "seekable range end at specified time");
+
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_LoadedDataFired_mp4.html b/dom/media/mediasource/test/test_LoadedDataFired_mp4.html
new file mode 100644
index 0000000000..476303d4fd
--- /dev/null
+++ b/dom/media/mediasource/test/test_LoadedDataFired_mp4.html
@@ -0,0 +1,57 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: Check that playback only starts once we have data at time = 0</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ el.addEventListener("loadeddata", () => {
+ ok(el.buffered.length, "data is buffered");
+ is(el.buffered.start(0), 0, "must fire loadeddata when data has been loaded");
+ is(el.currentTime, 0, "must fire loadeddata at start");
+ });
+ el.addEventListener("playing", () => {
+ ok(el.buffered.length, "data is buffered");
+ is(el.buffered.start(0), 0, "must fire playing when data has been loaded");
+ ok(el.currentTime >= 0, "must have started playback");
+ });
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ let p = once(el, "loadedmetadata");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await p;
+ videosb.appendWindowStart = 2;
+ videosb.appendWindowEnd = 4;
+ is(el.readyState, el.HAVE_METADATA, "readyState is HAVE_METADATA");
+ // Load [2.4, 3.968344). 2.4 as it's the first keyframe after 2s and
+ // 3.968344 as the last frame ends after 4s.
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 8), ".m4s");
+ is(el.readyState, el.HAVE_METADATA, "readyState is HAVE_METADATA");
+ // test that appendWindowEnd did its job.
+ ok(el.buffered.start(0) >= 2, "no data can be found prior appendWindowStart");
+ ok(el.buffered.end(el.buffered.length - 1) <= 4, "no data can be found beyond appendWindowEnd");
+ el.play();
+ await once(el, "play");
+ videosb.appendWindowStart = 0;
+ p = once(el, "playing");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 8), ".m4s");
+ await p;
+ ok(true, "playing");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_LoadedMetadataFired.html b/dom/media/mediasource/test/test_LoadedMetadataFired.html
new file mode 100644
index 0000000000..68030dbe2f
--- /dev/null
+++ b/dom/media/mediasource/test/test_LoadedMetadataFired.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: append initialization only</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("seek.webm"), 0, 318));
+ v.play();
+ await once(v, "loadedmetadata");
+ ok(true, "Got loadedmetadata event");
+ is(v.videoWidth, 320, "videoWidth has correct initial value");
+ is(v.videoHeight, 240, "videoHeight has correct initial value");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_LoadedMetadataFired_mp4.html b/dom/media/mediasource/test/test_LoadedMetadataFired_mp4.html
new file mode 100644
index 0000000000..0934907578
--- /dev/null
+++ b/dom/media/mediasource/test/test_LoadedMetadataFired_mp4.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: append initialization only</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("bipbop/bipbop2s.mp4"), 0, 1395));
+ v.play();
+ await once(v, "loadedmetadata");
+ ok(true, "Got loadedmetadata event");
+ is(v.videoWidth, 400, "videoWidth has correct initial value");
+ is(v.videoHeight, 300, "videoHeight has correct initial value");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_MediaSource.html b/dom/media/mediasource/test/test_MediaSource.html
new file mode 100644
index 0000000000..9bdaa0d30b
--- /dev/null
+++ b/dom/media/mediasource/test/test_MediaSource.html
@@ -0,0 +1,92 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ SimpleTest.doesThrow(() => new SourceBuffer, "new SourceBuffer should fail");
+ SimpleTest.doesThrow(() => new SourceBufferList, "new SourceBufferList direct should fail");
+
+ ok(ms instanceof EventTarget, "MediaSource must be an EventTarget");
+ is(ms.readyState, "closed", "New MediaSource must be in closed state");
+
+ // Wrapper creation, tests for leaks.
+ SpecialPowers.wrap(ms);
+
+ // Set an expando to force wrapper creation, tests for leaks.
+ ms.foo = null;
+
+ ok(URL.createObjectURL(ms), "Create an objectURL from the MediaSource");
+
+ let loadedmetadataCount = 0;
+ let updatestartCount = 0;
+ let updateendCount = 0;
+ let updateCount = 0;
+
+ ok(MediaSource.isTypeSupported("video/webm; codecs=vp8"), "VP8 MSE is always supported");
+ ok(MediaSource.isTypeSupported("audio/webm"), "Audio MSE is always supported");
+
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ is(ms.readyState, "open", "MediaSource must be in open state after sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+ ok(sb, "Create a SourceBuffer");
+ is(ms.sourceBuffers.length, 1, "MediaSource.sourceBuffers is expected length");
+ is(ms.sourceBuffers[0], sb, "SourceBuffer in list matches our SourceBuffer");
+ is(ms.activeSourceBuffers.length, 0, "MediaSource.activeSourceBuffers is expected length");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("seek.webm")));
+ is(sb.updating, true, "SourceBuffer.updating is expected value after appendBuffer");
+
+ sb.addEventListener("update", () => {
+ is(sb.updating, false, "SourceBuffer.updating is expected value in update event");
+ updateCount++;
+ /* Ensure that we endOfStream on the first update event only as endOfStream can
+       raise more if the duration of the last buffered range and the initial duration
+ differ. See bug 1065207 */
+ if (updateCount == 1) {
+ ms.endOfStream();
+ }
+ });
+
+ sb.addEventListener("updatestart", () => updatestartCount++);
+
+ sb.addEventListener("updateend", () => {
+ is(ms.activeSourceBuffers[0], sb, "SourceBuffer in active list matches our SourceBuffer");
+ is(sb.updating, false, "SourceBuffer.updating is expected value in updateend event");
+ updateendCount++;
+ v.play();
+ });
+
+ ms.addEventListener("sourceended", () => {
+ ok(true, "Receive a sourceended event");
+ is(ms.readyState, "ended", "MediaSource must be in ended state after sourceended");
+ });
+
+ v.addEventListener("loadedmetadata", () => loadedmetadataCount++);
+
+ await once(v, "ended");
+ // XXX: Duration should be exactly 4.0, see bug 1065207.
+ ok(Math.abs(v.duration - 4) <= 0.002, "Video has correct duration");
+ ok(Math.abs(v.currentTime - 4) <= 0.002, "Video has played to end");
+  // XXX: 2 update events can be received due to duration differences, see bug 1065207.
+ ok(updateCount == 1 || updateCount == 2, "update event received");
+ ok(updateendCount == 1 || updateendCount == 2, "updateend event received");
+ ok(updatestartCount == 1 || updatestartCount == 2, "updatestart event received");
+ is(loadedmetadataCount, 1, "loadedmetadata event received");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_MediaSource_capture_gc.html b/dom/media/mediasource/test/test_MediaSource_capture_gc.html
new file mode 100644
index 0000000000..d986a6f9ac
--- /dev/null
+++ b/dom/media/mediasource/test/test_MediaSource_capture_gc.html
@@ -0,0 +1,72 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test garbage collection of captured stream, when playing a MediaSource</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+ <script type="text/javascript" src="mediasource.js"></script>
+</head>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+function forceGC() {
+ SpecialPowers.gc();
+ SpecialPowers.forceGC();
+ SpecialPowers.forceCC();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+window.onload = async function() {
+// Create an infinite source using a MediaSource
+let el = document.createElement("audio");
+const ms = new MediaSource();
+el.src = URL.createObjectURL(ms);
+await once(ms, "sourceopen");
+const sb = ms.addSourceBuffer("video/mp4");
+await fetchAndLoad(sb, "bipbop/bipbop_audio", ["init"], ".mp4");
+await fetchAndLoad(sb, "bipbop/bipbop_audio", range(1, 11), ".m4s");
+setInterval(async function() {
+ sb.timestampOffset = sb.buffered.end(sb.buffered.length - 1);
+ await fetchAndLoad(sb, "bipbop/bipbop_audio", range(1, 11), ".m4s");
+}, 8000);
+el.play();
+
+// Analyze the media element output.
+const ac = new AudioContext;
+const analyzer = ac.createAnalyser();
+
+// bug 1703603
+const stream = el.mozCaptureStreamUntilEnded();
+const mss = ac.createMediaStreamSource(stream);
+const gain = ac.createGain();
+// compensate mochitest volume scaling, but don't connect to the AudioContext's
+// destination to avoid noise during the test
+gain.gain.value = 90;
+mss.connect(gain).connect(analyzer);
+
+
+// Drop the media element reference: it is supposed to be kept alive by the
+// AudioContext via the `MediaStream`.
+el = null;
+
+// check whether the media element is still playing using the analyzer, spam the
+// GC to ensure all refs are kept.
+const buf = new Float32Array(analyzer.frequencyBinCount);
+const startTime = Date.now();
+function checkNonSilent() {
+ analyzer.getFloatFrequencyData(buf);
+ forceGC();
+  // Wait a couple of seconds (2000 ms).
+ if (Date.now() - startTime < 2000) {
+ requestAnimationFrame(checkNonSilent);
+ } else {
+ ok(true, "All objects were kept alive.");
+ SimpleTest.finish();
+ }
+}
+checkNonSilent();
+}
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_MediaSource_disabled.html b/dom/media/mediasource/test/test_MediaSource_disabled.html
new file mode 100644
index 0000000000..e14f493e0f
--- /dev/null
+++ b/dom/media/mediasource/test/test_MediaSource_disabled.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: disabling via pref</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+function test() {
+ ok(!window.MediaSource && !window.SourceBuffer && !window.SourceBufferList,
+ "MediaSource should be hidden behind a pref");
+ SimpleTest.doesThrow(() => new MediaSource,
+ "MediaSource should be hidden behind a pref");
+ SimpleTest.finish();
+}
+
+SpecialPowers.pushPrefEnv({"set":
+ [
+ ["media.mediasource.enabled", false],
+ ],
+}, test);
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_MediaSource_flac_mp4.html b/dom/media/mediasource/test/test_MediaSource_flac_mp4.html
new file mode 100644
index 0000000000..9cc159e467
--- /dev/null
+++ b/dom/media/mediasource/test/test_MediaSource_flac_mp4.html
@@ -0,0 +1,33 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: Can seek to last frame</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ is(ms.readyState, "open", "MediaSource must be in open state after sourceopen");
+ const sb = ms.addSourceBuffer("audio/mp4; codecs=\"flac\"");
+ ok(sb, "Create a SourceBuffer");
+
+ await fetchAndLoad(sb, "flac/IS", [""], ".mp4");
+ await fetchAndLoad(sb, "flac/0000", range(1, 3), ".m4s");
+ el.play();
+ ms.endOfStream();
+ await once(el, "ended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_MediaSource_memory_reporting.html b/dom/media/mediasource/test/test_MediaSource_memory_reporting.html
new file mode 100644
index 0000000000..70c720effd
--- /dev/null
+++ b/dom/media/mediasource/test/test_MediaSource_memory_reporting.html
@@ -0,0 +1,47 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: memory reporting</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ // Load a webm video and play it.
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+ await fetchAndLoad(sb, "seek", [""], ".webm");
+ const p = once(v, "ended");
+ ms.endOfStream();
+ v.play();
+ await p;
+
+ // Test that memory reporting works once we've played a video.
+ // Grab a memory report.
+ const mgr = SpecialPowers.Cc["@mozilla.org/memory-reporter-manager;1"]
+ .getService(SpecialPowers.Ci.nsIMemoryReporterManager);
+
+ let amount;
+ const handleReport = (aProcess, aPath, aKind, aUnits, aAmount) => {
+ if (aPath == "explicit/media/resources") {
+ amount = (amount || 0) + aAmount;
+ }
+ };
+
+ await new Promise(r => mgr.getReports(handleReport, null, r, null, /* anonymized = */ false));
+ ok(true, "Yay didn't crash!");
+ ok(amount !== undefined, "Got media resources amount");
+ ok(amount > 0, "Non-zero amount reported for media resources");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_MediaSource_mp4.html b/dom/media/mediasource/test/test_MediaSource_mp4.html
new file mode 100644
index 0000000000..2ab79f37f3
--- /dev/null
+++ b/dom/media/mediasource/test/test_MediaSource_mp4.html
@@ -0,0 +1,90 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ SimpleTest.doesThrow(() => new SourceBuffer, "new SourceBuffer should fail");
+ SimpleTest.doesThrow(() => new SourceBufferList, "new SourceBufferList direct should fail");
+
+ ok(ms instanceof EventTarget, "MediaSource must be an EventTarget");
+ is(ms.readyState, "closed", "New MediaSource must be in closed state");
+
+ // Wrapper creation, tests for leaks.
+ SpecialPowers.wrap(ms);
+
+ // Set an expando to force wrapper creation, tests for leaks.
+ ms.foo = null;
+
+ ok(URL.createObjectURL(ms), "Create an objectURL from the MediaSource");
+
+ let loadedmetadataCount = 0;
+ let updatestartCount = 0;
+ let updateendCount = 0;
+ let updateCount = 0;
+
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ is(ms.readyState, "open", "MediaSource must be in open state after sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+ ok(sb, "Create a SourceBuffer");
+ is(ms.sourceBuffers.length, 1, "MediaSource.sourceBuffers is expected length");
+ is(ms.sourceBuffers[0], sb, "SourceBuffer in list matches our SourceBuffer");
+ is(ms.activeSourceBuffers.length, 0, "MediaSource.activeSourceBuffers is expected length");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("bipbop/bipbop2s.mp4")));
+ is(sb.updating, true, "SourceBuffer.updating is expected value after appendBuffer");
+
+ sb.addEventListener("update", () => {
+ is(sb.updating, false, "SourceBuffer.updating is expected value in update event");
+ updateCount++;
+  /* Ensure that we call endOfStream on the first update event only, as endOfStream
+     can raise more update events if the duration of the last buffered range and the
+     initial duration differ. See bug 1065207 */
+ if (updateCount == 1) {
+ ms.endOfStream();
+ }
+ });
+
+ sb.addEventListener("updatestart", () => updatestartCount++);
+
+ sb.addEventListener("updateend", () => {
+ is(ms.activeSourceBuffers[0], sb, "SourceBuffer in active list matches our SourceBuffer");
+ is(sb.updating, false, "SourceBuffer.updating is expected value in updateend event");
+ updateendCount++;
+ v.play();
+ });
+
+ ms.addEventListener("sourceended", () => {
+ ok(true, "Receive a sourceended event");
+ is(ms.readyState, "ended", "MediaSource must be in ended state after sourceended");
+ });
+
+ v.addEventListener("loadedmetadata", () => loadedmetadataCount++);
+
+ await once(v, "ended");
+  // The bipbop video doesn't start at 0. The old MSE code adjusts the
+  // timestamps and ignores the audio track. The new one doesn't.
+ isfuzzy(v.duration, 1.696, 0.166, "Video has correct duration");
+ isfuzzy(v.currentTime, 1.696, 0.166, "Video has correct duration");
+  // XXX: 2 update events can be received due to duration differences, see bug 1065207.
+ ok(updateCount == 1 || updateCount == 2, "update event received");
+ ok(updateendCount == 1 || updateendCount == 2, "updateend event received");
+ ok(updatestartCount == 1 || updatestartCount == 2, "updatestart event received");
+ is(loadedmetadataCount, 1, "loadedmetadata event received");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_MultipleInitSegments.html b/dom/media/mediasource/test/test_MultipleInitSegments.html
new file mode 100644
index 0000000000..f4c91c08c5
--- /dev/null
+++ b/dom/media/mediasource/test/test_MultipleInitSegments.html
@@ -0,0 +1,49 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: Append buffer with multiple init segments</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+ const seek_lowres = await fetchWithXHR("seek_lowres.webm");
+ const seek = await fetchWithXHR("seek.webm");
+ const data = [
+ [seek_lowres, 0, 438], // lowres init segment
+ [seek_lowres, 438, 25950], // lowres media segment 0-1
+ [seek, 0, 318], // init segment
+ [seek, 46712, 67833], // media segment 0.8-1.201
+ ];
+ const length = data.map(d => d[2] - d[1]).reduce((a, b) => a + b, 0);
+ const arrayBuffer = new Uint8Array(length);
+ let pos = 0;
+ for (const d of data) {
+ const buffer = new Uint8Array(d[0], d[1], d[2] - d[1]);
+ arrayBuffer.set(buffer, pos);
+ pos += buffer.byteLength;
+ }
+ await loadSegment(sb, arrayBuffer);
+ // Since we are passing multiple segments in one buffer,
+ // the first durationchange event from parsing the init
+ // segment will be fired before updateend.
+ const p = once(v, "durationchange");
+ ms.endOfStream();
+ await p;
+ ok(v.duration, 1.201);
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_MultipleInitSegments_mp4.html b/dom/media/mediasource/test/test_MultipleInitSegments_mp4.html
new file mode 100644
index 0000000000..47c115677d
--- /dev/null
+++ b/dom/media/mediasource/test/test_MultipleInitSegments_mp4.html
@@ -0,0 +1,44 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: Append buffer with multiple init segments</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+ const init = new Uint8Array(await fetchWithXHR("bipbop/bipbop_videoinit.mp4"));
+ const segment1 = new Uint8Array(await fetchWithXHR("bipbop/bipbop_video1.m4s"));
+ const segment2 = new Uint8Array(await fetchWithXHR("bipbop/bipbop_video2.m4s"));
+ const data = [init, segment1, init, segment2];
+ const length = data.map(d => d.byteLength).reduce((a, b) => a + b, 0);
+ const arrayBuffer = new Uint8Array(length);
+ let pos = 0;
+ for (const buffer of data) {
+ arrayBuffer.set(buffer, pos);
+ pos += buffer.byteLength;
+ }
+ await loadSegment(sb, arrayBuffer);
+ // Since we are passing multiple segments in one buffer,
+ // the first durationchange event from parsing the init
+ // segment will be fired before updateend.
+ const p = once(v, "durationchange");
+ ms.endOfStream();
+ await p;
+ ok(v.duration, 1.601666);
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_NoAudioLoopBackData.html b/dom/media/mediasource/test/test_NoAudioLoopBackData.html
new file mode 100644
index 0000000000..7de7209b74
--- /dev/null
+++ b/dom/media/mediasource/test/test_NoAudioLoopBackData.html
@@ -0,0 +1,78 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: loop-back data not available yet (shorter audio)</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+
+/**
+ * This test is used to check whether a looping video can loop back successfully
+ * when it has a shorter audio track than its video track. When reaching EOS for
+ * the shorter track, there is no loop-back data at the start position (they are
+ * not appended yet). Even so, we should still be able to loop back, but the
+ * looping would become non-seamless in this situation.
+ */
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+
+ // Here we create a shorter audio than video.
+ info(`create different length source buffers`);
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(5, 8), ".m4s");
+ audiosb.appendWindowEnd = videosb.buffered.end(0) - 0.2;
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(5, 8), ".m4s");
+ ms.endOfStream();
+ await Promise.all([once(el, "durationchange"), once(ms, "sourceended")]);
+ info(`audio=[${audiosb.buffered.start(0)}-${audiosb.buffered.end(0)}], video=[${videosb.buffered.start(0)}-${videosb.buffered.end(0)}]`);
+ ok(true, `endOfStream completed, buffer=[${el.buffered.start(0)}, ${el.buffered.end(0)}]`);
+ ok(videosb.buffered.end(0) > audiosb.buffered.end(0), `video should be longer than audio`);
+
+ info(`seek to the position where buffered data exists`);
+ el.loop = true;
+ el.controls = true;
+ el.currentTime = el.buffered.start(0);
+ await el.play();
+
+ info(`video should trigger seeking when reaching to the end`);
+ let seekingCount = 0, seekedCount = 0;
+ el.addEventListener("seeking", () => {
+ is(++seekingCount, 1, "should only receive seeking once!");
+ });
+ el.addEventListener("seeked", () => {
+ is(++seekedCount, 1, "should only receive seeked once!");
+ });
+ await once(el, "seeking");
+
+ info(`trim old data before append new data`);
+ let p = Promise.all([once(videosb, "updateend"), once(audiosb, "updateend")]);
+ videosb.remove(videosb.buffered.start(0), videosb.buffered.end(0));
+ audiosb.remove(audiosb.buffered.start(0), audiosb.buffered.end(0));
+ await p;
+
+ info(`append new data`);
+ const seekedPromise = once(el, "seeked");
+ p = Promise.all([once(videosb, "updateend"), once(audiosb, "updateend")]);
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 2), ".m4s");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(1, 2), ".m4s");
+ await p;
+ info(`audio=[${audiosb.buffered.start(0)}-${audiosb.buffered.end(0)}], video=[${videosb.buffered.start(0)}-${videosb.buffered.end(0)}]`);
+
+ info(`now we should be able to finish seeking to the start position`);
+ await seekedPromise;
+
+ SimpleTest.finish(SimpleTest);
+});
+
+</script>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_NoAudioLoopBackData_Muted.html b/dom/media/mediasource/test/test_NoAudioLoopBackData_Muted.html
new file mode 100644
index 0000000000..14cdf34bc6
--- /dev/null
+++ b/dom/media/mediasource/test/test_NoAudioLoopBackData_Muted.html
@@ -0,0 +1,79 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: loop-back data not available yet (shorter MUTED audio)</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+
+/**
+ * This test is used to check whether a looping video can loop back successfully
+ * when it has a shorter MUTED audio track than its video track. When reaching
+ * EOS for the shorter track, there is no loop-back data at the start position
+ * (they are not appended yet). Even so, we should still be able to loop back,
+ * but the looping would become non-seamless in this situation.
+ */
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+
+ // Here we create a shorter audio than video.
+ info(`create different length source buffers`);
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(5, 8), ".m4s");
+ audiosb.appendWindowEnd = videosb.buffered.end(0) - 0.2;
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(5, 8), ".m4s");
+ ms.endOfStream();
+ await Promise.all([once(el, "durationchange"), once(ms, "sourceended")]);
+ info(`audio=[${audiosb.buffered.start(0)}-${audiosb.buffered.end(0)}], video=[${videosb.buffered.start(0)}-${videosb.buffered.end(0)}]`);
+ ok(true, `endOfStream completed, buffer=[${el.buffered.start(0)}, ${el.buffered.end(0)}]`);
+ ok(videosb.buffered.end(0) > audiosb.buffered.end(0), `video should be longer than audio`);
+
+ info(`seek to the position where buffered data exists`);
+ el.muted = true;
+ el.loop = true;
+ el.controls = true;
+ el.currentTime = el.buffered.start(0);
+ await el.play();
+
+ info(`video should trigger seeking when reaching to the end`);
+ let seekingCount = 0, seekedCount = 0;
+ el.addEventListener("seeking", () => {
+ is(++seekingCount, 1, "should only receive seeking once!");
+ });
+ el.addEventListener("seeked", () => {
+ is(++seekedCount, 1, "should only receive seeked once!");
+ });
+ await once(el, "seeking");
+
+ info(`trim old data before append new data`);
+ let p = Promise.all([once(videosb, "updateend"), once(audiosb, "updateend")]);
+ videosb.remove(videosb.buffered.start(0), videosb.buffered.end(0));
+ audiosb.remove(audiosb.buffered.start(0), audiosb.buffered.end(0));
+ await p;
+
+ info(`append new data`);
+ const seekedPromise = once(el, "seeked");
+ p = Promise.all([once(videosb, "updateend"), once(audiosb, "updateend")]);
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 2), ".m4s");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(1, 2), ".m4s");
+ await p;
+ info(`audio=[${audiosb.buffered.start(0)}-${audiosb.buffered.end(0)}], video=[${videosb.buffered.start(0)}-${videosb.buffered.end(0)}]`);
+
+ info(`now we should be able to finish seeking to the start position`);
+ await seekedPromise;
+
+ SimpleTest.finish(SimpleTest);
+});
+
+</script>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_NoVideoLoopBackData.html b/dom/media/mediasource/test/test_NoVideoLoopBackData.html
new file mode 100644
index 0000000000..407b2ecb42
--- /dev/null
+++ b/dom/media/mediasource/test/test_NoVideoLoopBackData.html
@@ -0,0 +1,81 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: loop-back data not available yet (shorter video)</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+
+/**
+ * This test is used to check whether a looping video can loop back successfully
+ * when it has a shorter video track than its audio track. When reaching EOS for
+ * the shorter track, there is no loop-back data at the start position (they are
+ * not appended yet). Even so, we should still be able to loop back, but the
+ * looping would become non-seamless in this situation.
+ */
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+
+ // Here we create a way shorter video than audio because audio decoding is
+  // very fast. If two tracks only have a small difference in length, the audio
+  // track would still reach the end first. But in this test, we want to test
+ // reaching video EOS first.
+ info(`create different length source buffers`);
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(5, 8), ".m4s");
+ videosb.appendWindowEnd = audiosb.buffered.end(0) - 2.5;
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(5, 8), ".m4s");
+ ms.endOfStream();
+ await Promise.all([once(el, "durationchange"), once(ms, "sourceended")]);
+ info(`audio=[${audiosb.buffered.start(0)}-${audiosb.buffered.end(0)}], video=[${videosb.buffered.start(0)}-${videosb.buffered.end(0)}]`);
+ ok(true, `endOfStream completed, buffer=[${el.buffered.start(0)}, ${el.buffered.end(0)}]`);
+ ok(audiosb.buffered.end(0) > videosb.buffered.end(0), `audio should be longer than video`);
+
+ info(`seek to the position where buffered data exists`);
+ el.loop = true;
+ el.controls = true;
+ el.currentTime = el.buffered.start(0);
+ await el.play();
+
+ info(`video should trigger seeking when reaching to the end`);
+ let seekingCount = 0, seekedCount = 0;
+ el.addEventListener("seeking", () => {
+ is(++seekingCount, 1, "should only receive seeking once!");
+ });
+ el.addEventListener("seeked", () => {
+ is(++seekedCount, 1, "should only receive seeked once!");
+ });
+ await once(el, "seeking");
+
+ info(`trim old data before append new data`);
+ let p = Promise.all([once(videosb, "updateend"), once(audiosb, "updateend")]);
+ videosb.remove(videosb.buffered.start(0), videosb.buffered.end(0));
+ audiosb.remove(audiosb.buffered.start(0), audiosb.buffered.end(0));
+ await p;
+
+ info(`append new data`);
+ const seekedPromise = once(el, "seeked");
+ p = Promise.all([once(videosb, "updateend"), once(audiosb, "updateend")]);
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 2), ".m4s");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(1, 2), ".m4s");
+ await p;
+ info(`audio=[${audiosb.buffered.start(0)}-${audiosb.buffered.end(0)}], video=[${videosb.buffered.start(0)}-${videosb.buffered.end(0)}]`);
+
+ info(`now we should be able to finish seeking to the start position`);
+ await seekedPromise;
+
+ SimpleTest.finish(SimpleTest);
+});
+
+</script>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_OnEvents.html b/dom/media/mediasource/test/test_OnEvents.html
new file mode 100644
index 0000000000..ae0f348ebe
--- /dev/null
+++ b/dom/media/mediasource/test/test_OnEvents.html
@@ -0,0 +1,42 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: live seekable range</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ const receiveEvent = e => v["got" + e] = true;
+
+ const msevents = ["onsourceopen", "onsourceended"];
+ msevents.forEach(e => ms[e] = () => receiveEvent(e));
+
+ const sblistevents = ["onaddsourcebuffer", "onremovesourcebuffer"];
+ sblistevents.forEach(e => ms.sourceBuffers[e] = () => receiveEvent(e));
+
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const sbevents = ["onupdatestart", "onupdate", "onupdateend", "onabort"];
+ sbevents.forEach(e => sb[e] = () => receiveEvent(e));
+
+ await fetchAndLoad(sb, "seek", [""], ".webm");
+ sb.appendBuffer(await fetchWithXHR("seek.webm"));
+ ms.removeSourceBuffer(sb); // will fire abort and removesourcebuffer
+ ms.endOfStream(); // will fire sourceended
+ await once(ms, "sourceended");
+ [...msevents, ...sbevents, ...sblistevents].forEach(e => ok(v["got" + e], "got " + e));
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_PlayEvents.html b/dom/media/mediasource/test/test_PlayEvents.html
new file mode 100644
index 0000000000..82ccaa42b5
--- /dev/null
+++ b/dom/media/mediasource/test/test_PlayEvents.html
@@ -0,0 +1,115 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// This test checks that readyState is properly set and the appropriate events are being fired accordingly:
+// 1. Load 1.6s of data and ensure that canplay event is fired.
+// 2. Load data to have a complete buffered range from 0 to duration and ensure that canplaythrough is fired.
+// 3. Seek to an area with no buffered data, and ensure that readyState goes back to HAVE_METADATA
+// 4. Load 1.6s of data at the seek position and ensure that canplay is fired and that readyState is now HAVE_FUTURE_DATA
+// 5. Start playing video and check that once it reaches a position with no data, readyState goes back to HAVE_CURRENT_DATA and waiting event is fired.
+// 6. Add 1.6s of data once video element fired waiting, that canplay is fired once readyState is HAVE_FUTURE_DATA.
+// 7. Finally load data to the end and ensure that canplaythrough is fired and that readyState is now HAVE_ENOUGH_DATA
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ logEvents(el);
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ el.addEventListener("error", e => {
+ ok(false, `should not fire ${e.type} event`);
+ SimpleTest.finish();
+ });
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ let p = once(el, "loadedmetadata");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await p;
+ ok(true, "got loadedmetadata event");
+ p = Promise.all([once(el, "loadeddata"), once(el, "canplay")]);
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 3), ".m4s");
+ await p;
+ ok(true, "got canplay event");
+ // set element duration to 3.203333s. We do so in order to guarantee that
+ // the end of the buffered range will be equal to duration, causing
+ // canplaythrough to be fired later.
+ ms.duration = 3.203333;
+ await once(el, "durationchange");
+ ok(true, "got durationchange event");
+ // Load [0.801666, 3.203333]
+ p = once(el, "canplaythrough");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(3, 5), ".m4s");
+ await p;
+ ok(true, "got canplaythrough event");
+ // set element duration to 9.203333s, this value is set to coincide with
+ // data added later (we now have an empty range from 6s to 9.203333s).
+ ms.duration = 9.203333;
+ await once(el, "durationchange");
+ ok(true, "got durationchange event");
+ // An arbitrary value, so we are guaranteed to be in a range with no data.
+ el.currentTime = 6;
+ videosb.timestampOffset = 6;
+ ok(el.seeking, "seeking started");
+ await once(el, "seeking");
+ ok(true, "got seeking event");
+ is(el.readyState, el.HAVE_METADATA, "readyState is HAVE_METADATA");
+ // Load [6+0, 6+1.601666)
+ p = Promise.all([once(el, "seeked"), once(el, "canplay")]);
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 3), ".m4s");
+ await p;
+ ok(true, "got seeked and canplay event");
+ is(el.currentTime, 6, "seeked to 6s");
+ is(el.readyState, el.HAVE_FUTURE_DATA, "readyState is HAVE_FUTURE_DATA");
+ // Load [6+1.60166, 6+3.203333]
+ p = once(el, "canplaythrough");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(3, 5), ".m4s");
+ await p;
+ ok(true, "got canplaythrough event");
+ // set element duration to 19.805s, this value is set to coincide with
+ // data added later (we now have an empty range from 15 to 19.805).
+ ms.duration = 19.805;
+ await once(el, "durationchange");
+ ok(true, "got durationchange event");
+ el.currentTime = 15;
+ videosb.timestampOffset = 15;
+ ok(el.seeking, "seeking started");
+ await once(el, "seeking");
+ ok(true, "got seeking event");
+ // Load [15+0, 15+1.601666)
+ p = once(el, "seeked");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 3), ".m4s");
+ await p;
+ ok(true, "got seeked event");
+ // Load [15+1.60166, 15+3.203333]
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(3, 5), ".m4s");
+ ok(true, "data loaded");
+ // Playback we play for a little while then stall.
+ p = Promise.all([once(el, "playing"), once(el, "waiting")]);
+ el.play();
+ await p;
+ ok(true, "got playing and waiting event");
+ // Playback has stalled, readyState is back to HAVE_CURRENT_DATA.
+ is(el.readyState, el.HAVE_CURRENT_DATA, "readyState is HAVE_CURRENT_DATA");
+ // Load [15+3.203333, 15+4.805)
+ // Our final buffered range will now be [0, 3.203333)[6, 9.203333)[15, 19.805)
+ p = Promise.all([once(el, "playing"), once(el, "canplay"), once(el, "canplaythrough")]);
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(5, 7), ".m4s");
+ await p;
+ ok(true, "got playing, canplay and canplaythrough event");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_PlayEventsAutoPlaying.html b/dom/media/mediasource/test/test_PlayEventsAutoPlaying.html
new file mode 100644
index 0000000000..3e395c799d
--- /dev/null
+++ b/dom/media/mediasource/test/test_PlayEventsAutoPlaying.html
@@ -0,0 +1,58 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// This test checks that readyState is properly set and the appropriate events are being fired accordingly:
+// 1. Ensure that play/playing aren't fired before any media data been added.
+// 2. Load 1.6s of data and ensure that canplay, play and playing events are fired.
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ el.autoplay = true;
+ const eventCounts = { play: 0, playing: 0 };
+ await once(ms, "sourceopen");
+ logEvents(el);
+ ok(true, "Receive a sourceopen event");
+
+ const forbiddenEvents = e => {
+ ok(el.readyState >= el.HAVE_FUTURE_DATA, "Must not have received event too early");
+ is(eventCounts[e.type], 0, "event should have only be fired once");
+ eventCounts[e.type]++;
+ };
+ el.addEventListener("play", forbiddenEvents);
+ el.addEventListener("playing", forbiddenEvents);
+
+ const videosb = ms.addSourceBuffer("video/mp4");
+ el.addEventListener("error", e => {
+ ok(false, `should not fire ${e.type} event`);
+ SimpleTest.finish();
+ });
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ let p = once(el, "loadedmetadata");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await p;
+ ok(true, "got loadedmetadata event");
+ // We're only adding 1.6s worth of data, not enough for readyState to change to HAVE_ENOUGH_DATA
+ // So we end the media source so that all the playable data is available.
+ p = Promise.all(["loadeddata", "canplay", "play", "playing", "ended"].map(e => once(el, e)));
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 3), ".m4s");
+ ms.endOfStream();
+ await p;
+ ok(true, "got all required event");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_PlayEventsAutoPlaying2.html b/dom/media/mediasource/test/test_PlayEventsAutoPlaying2.html
new file mode 100644
index 0000000000..8845a26ac4
--- /dev/null
+++ b/dom/media/mediasource/test/test_PlayEventsAutoPlaying2.html
@@ -0,0 +1,58 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// This test checks that readyState is properly set and the appropriate events are being fired accordingly:
+// 1. Ensure that play/playing aren't fired before any media data been added.
+// 2. Load more than 10s of data and ensure that canplay, play and playing events are fired.
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ el.autoplay = true;
+ const eventCounts = { play: 0, playing: 0 };
+ await once(ms, "sourceopen");
+ logEvents(el);
+ ok(true, "Receive a sourceopen event");
+
+ const forbiddenEvents = e => {
+ ok(el.readyState >= el.HAVE_FUTURE_DATA, "Must not have received event too early");
+ is(eventCounts[e.type], 0, "event should have only be fired once");
+ eventCounts[e.type]++;
+ };
+ el.addEventListener("play", forbiddenEvents);
+ el.addEventListener("playing", forbiddenEvents);
+
+ const videosb = ms.addSourceBuffer("video/mp4");
+ el.addEventListener("error", e => {
+ ok(false, `should not fire ${e.type} event`);
+ SimpleTest.finish();
+ });
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ let p = once(el, "loadedmetadata");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await p;
+ ok(true, "got loadedmetadata event");
+ // We shift the timestamps slightly to create a small gaps at the start.
+ // one that should normally be ignored.
+ videosb.timestampOffset = 0.1;
+ p = Promise.all(["loadeddata", "canplay", "play", "playing"].map(e => once(el, e)));
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 14), ".m4s");
+ await p;
+ ok(true, "got all required event");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_RemoveSourceBuffer.html b/dom/media/mediasource/test/test_RemoveSourceBuffer.html
new file mode 100644
index 0000000000..11c6a51deb
--- /dev/null
+++ b/dom/media/mediasource/test/test_RemoveSourceBuffer.html
@@ -0,0 +1,52 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: check buffered status after removed all source buffer</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+const videoURL = "seek.webm";
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async function(ms, el) {
+ info("- wait for sourceopen -");
+ await once(ms, "sourceopen");
+
+ info("- wait for fetching data -");
+ const arrayBuffer = await fetchWithXHR(videoURL);
+
+ info("- create source buffer and append data -");
+ const sourceBuffer = ms.addSourceBuffer("video/webm");
+ sourceBuffer.appendBuffer(arrayBuffer);
+ await once(sourceBuffer, "updateend");
+ is(ms.sourceBuffers.length, 1,
+ "the length of source buffers list is 1.");
+ is(ms.activeSourceBuffers.length, 1,
+ "the length of active source buffers list is 1.");
+ ok(ms.duration != 0, "duration is not 0.");
+ is(el.buffered.length, 1, "buffered range is 1.");
+
+ info("- remove source buffer from media source -");
+ ms.removeSourceBuffer(sourceBuffer);
+ await once(ms.sourceBuffers, "removesourcebuffer");
+ is(ms.sourceBuffers.length, 0, "source buffers list is empty.");
+ is(ms.activeSourceBuffers.length, 0, "active source buffers list is empty.");
+ ok(ms.duration != 0, "duration is not 0.");
+ is(el.buffered.length, 0,
+ "buffered range is empty since we don't have any source buffer.");
+
+ info("- call endOfStream -");
+ ms.endOfStream();
+ is(ms.duration, 0, "duraton is 0 since we don't have any source buffer.");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_Resolution_change_should_not_cause_video_freeze.html b/dom/media/mediasource/test/test_Resolution_change_should_not_cause_video_freeze.html
new file mode 100644
index 0000000000..640b53441e
--- /dev/null
+++ b/dom/media/mediasource/test/test_Resolution_change_should_not_cause_video_freeze.html
@@ -0,0 +1,49 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: video resolution changes during playback should not cause video freeze (Bug 1718709)</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer('video/mp4');
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("bug1718709_low_res.mp4")));
+ ok(true, "appended low resolution video");
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("bug1718709_high_res.mp4")));
+ ok(true, "appended high resolution video");
+
+ info(`start from the position which is near to the place where resolution changes`);
+ v.currentTime = 13;
+ ok(await v.play().then(_=>true,_=>false), "video started playing");
+
+ // When video resolution changes, it should not cause video freeze so we check
+ // its painted frame amount regularly to see if we stop updating video frames.
+ let lastPaintedFramesAmount = v.mozPaintedFrames;
+ const intervalHandle = setInterval(_=>{
+ ok(lastPaintedFramesAmount < v.mozPaintedFrames,
+ `painted frames keeps growing from ${lastPaintedFramesAmount} to ${v.mozPaintedFrames}`);
+ lastPaintedFramesAmount = v.mozPaintedFrames;
+ }, 1000);
+
+ // As we didn't append full video, so we will receive `waiting` event later
+ // which indicates that we can stop testing because we've finished playing
+ // the high resolution part.
+ await new Promise(r => {
+ v.onwaiting = _ => {
+ clearInterval(intervalHandle);
+ r();
+ }
+ });
+ SimpleTest.finish();
+});
+
+</script>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_ResumeAfterClearing_mp4.html b/dom/media/mediasource/test/test_ResumeAfterClearing_mp4.html
new file mode 100644
index 0000000000..40e512ba12
--- /dev/null
+++ b/dom/media/mediasource/test/test_ResumeAfterClearing_mp4.html
@@ -0,0 +1,44 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: Don't get stuck buffering for too long when we have frames to show</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ ms.addEventListener("sourceopen", () => ok(false, "No more sourceopen"));
+ const sb = ms.addSourceBuffer("video/mp4");
+ ok(sb, "Create a SourceBuffer");
+ sb.addEventListener("error", e => {
+ ok(false, "Got Error: " + e);
+ SimpleTest.finish();
+ });
+ await fetchAndLoad(sb, "bipbop/bipbop", ["init"], ".mp4");
+ let p = once(v, "loadeddata");
+ await fetchAndLoad(sb, "bipbop/bipbop", range(1, 3), ".m4s");
+ await p;
+ // clear the entire sourcebuffer.
+ sb.remove(0, 5);
+ await once(sb, "updateend");
+ v.play();
+ // We have nothing to play, waiting will be fired.
+ await once(v, "waiting");
+ p = once(v, "playing");
+ await fetchAndLoad(sb, "bipbop/bipbop", range(1, 4), ".m4s");
+ await p;
+ ms.endOfStream();
+ await Promise.all([once(ms, "sourceended"), once(v, "ended")]);
+ SimpleTest.finish(SimpleTest);
+});
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekNoData_mp4.html b/dom/media/mediasource/test/test_SeekNoData_mp4.html
new file mode 100644
index 0000000000..1ea64f3fa4
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekNoData_mp4.html
@@ -0,0 +1,57 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>MSE: seeking before any media data has been appended</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ el.addEventListener("error", e => {
+ ok(false, `should not fire ${e.type} event`);
+ SimpleTest.finish();
+ });
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ must_not_throw(() => el.currentTime = 3, "setting currentTime is valid");
+ is(el.currentTime, 3, "currentTime is default playback start position");
+ is(el.seeking, false, "seek not started with HAVE_NOTHING");
+ await Promise.all([
+ fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4"),
+ fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4"),
+ once(el, "loadedmetadata"),
+ ]);
+ const p = once(el, "seeking");
+ el.play();
+ el.currentTime = 5;
+ is(el.readyState, el.HAVE_METADATA, "readyState is HAVE_METADATA");
+ is(el.seeking, true, "seek not started with HAVE_METADATA");
+ is(el.currentTime, 5, "currentTime is seek position");
+ await p;
+ ok(true, "Got seeking event");
+ await Promise.all([
+ once(el, "seeked"),
+ fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(5, 9), ".m4s"),
+ fetchAndLoad(videosb, "bipbop/bipbop_video", range(6, 10), ".m4s"),
+ ]);
+ ok(true, "Got seeked event");
+ ok(el.currentTime >= 5, "Time >= 5");
+ ms.endOfStream();
+ await once(el, "ended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekToEnd_mp4.html b/dom/media/mediasource/test/test_SeekToEnd_mp4.html
new file mode 100644
index 0000000000..0405cb875f
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekToEnd_mp4.html
@@ -0,0 +1,54 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seeking to end of data with data gap.</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 6), ".m4s");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ is(videosb.buffered.length, 1, "continuous buffered range");
+ // Ensure we have at least 2s less audio than video.
+ audiosb.appendWindowEnd = videosb.buffered.end(0) - 2;
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(1, 6), ".m4s");
+ ms.endOfStream();
+ await Promise.all([once(el, "durationchange"), once(ms, "sourceended")]);
+ ok(true, "endOfStream completed");
+ // Seek to the middle of the gap where audio is missing. As we are in readyState = ended
+ // seeking must complete.
+ el.currentTime = videosb.buffered.end(0) / 2 + audiosb.buffered.end(0) / 2;
+ ok(el.currentTime - audiosb.buffered.end(0) >= 1, "gap is big enough");
+ is(el.buffered.length, 1, "continuous buffered range");
+ is(el.buffered.end(0), videosb.buffered.end(0),
+ "buffered range end is aligned with longest track");
+ ok(el.seeking, "element is now seeking");
+ ok(el.currentTime >= el.buffered.start(0) && el.currentTime <= el.buffered.end(0),
+ "seeking time is in buffered range");
+ ok(el.currentTime > audiosb.buffered.end(0),
+ "seeking point is not buffered in audio track");
+ await once(el, "seeked");
+ ok(true, "we have successfully seeked");
+ // Now ensure that we can play to the end, even though we are missing data in one track.
+ el.play();
+ await once(el, "ended");
+ SimpleTest.finish(SimpleTest);
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekToLastFrame_mp4.html b/dom/media/mediasource/test/test_SeekToLastFrame_mp4.html
new file mode 100644
index 0000000000..edbfdff0a0
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekToLastFrame_mp4.html
@@ -0,0 +1,34 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: Can seek to last frame</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const sb = ms.addSourceBuffer("video/mp4");
+ await fetchAndLoad(sb, "bipbop/bipbop_480_624kbps-video", ["init"], ".mp4");
+ await fetchAndLoad(sb, "bipbop/bipbop_480_624kbps-video", range(1, 3), ".m4s");
+ el.play();
+  // Seek to the last video frame.
+ el.currentTime = 1.532517;
+ await once(el, "seeked");
+ ok(true, "seek completed");
+ ms.endOfStream();
+ await once(el, "ended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekTwice_mp4.html b/dom/media/mediasource/test/test_SeekTwice_mp4.html
new file mode 100644
index 0000000000..50ff32b1cd
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekTwice_mp4.html
@@ -0,0 +1,45 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>MSE: seeking into a buffered gap, then past it</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(1, 5), ".m4s");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(6, 12), ".m4s");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 6), ".m4s");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(7, 14), ".m4s");
+ let p = once(el, "seeking");
+ el.play();
+ el.currentTime = 4.5; // Seek to a gap in the video
+ await p;
+ ok(true, "Got seeking event");
+ p = once(el, "seeked");
+ el.currentTime = 6; // Seek past the gap.
+ await p;
+ ok(true, "Got seeked event");
+ ok(el.currentTime >= 6, "Time >= 6");
+ ms.endOfStream();
+ await once(el, "ended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStream.html b/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStream.html
new file mode 100644
index 0000000000..c65a4aff7e
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStream.html
@@ -0,0 +1,54 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seekable attribute after end of stream</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ info("- append first buffer -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer));
+
+ info("- wait for metadata -");
+ await once(v, "loadedmetadata");
+
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+
+ info("- check seekable -");
+ const target = 2;
+ ok(v.seekable.length, "Resource is seekable");
+ is(v.seekable.start(0), 0, "Seekable's start point is correct");
+ is(v.seekable.end(0), ms.duration, "Seekable's end point is correct");
+ ok(v.seekable.length &&
+ target >= v.seekable.start(0) &&
+ target < v.seekable.end(0), "Target is within seekable range");
+
+ info("- call end of stream -");
+ ms.endOfStream();
+ await once(ms, "sourceended");
+
+ info("- check seekable -");
+ ok(v.seekable.length, "Resource is seekable");
+ is(v.seekable.start(0), 0, "Seekable's start point is correct");
+ is(v.seekable.end(0), ms.duration, "Seekable's end point is correct");
+ ok(v.seekable.length &&
+ target >= v.seekable.start(0) &&
+ target < v.seekable.end(0), "Target is within seekable range");
+ SimpleTest.finish();
+});
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStreamSplit.html b/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStreamSplit.html
new file mode 100644
index 0000000000..bed2af8d48
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStreamSplit.html
@@ -0,0 +1,60 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seekable attribute after end of stream with split appendBuffer</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ info("- append first buffer -");
+ // 25523 is the offset of the first media segment's end
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 25523));
+
+ info("- wait for metadata -");
+ await once(v, "loadedmetadata");
+
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+
+ info("- append second buffer -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 25523));
+ await once(sb, "updateend");
+
+ info("- check seekable -");
+ const target = 2;
+ ok(v.seekable.length, "Resource is seekable");
+ is(v.seekable.start(0), 0, "Seekable's start point is correct");
+ is(v.seekable.end(0), ms.duration, "Seekable's end point is correct");
+ ok(v.seekable.length &&
+ target >= v.seekable.start(0) &&
+ target < v.seekable.end(0), "Target is within seekable range");
+
+ info("- call end of stream -");
+ ms.endOfStream();
+ await once(ms, "sourceended");
+
+ info("- check seekable -");
+ ok(v.seekable.length, "Resource is seekable");
+ is(v.seekable.start(0), 0, "Seekable's start point is correct");
+ is(v.seekable.end(0), ms.duration, "Seekable's end point is correct");
+ ok(v.seekable.length &&
+ target >= v.seekable.start(0) &&
+ target < v.seekable.end(0), "Target is within seekable range");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStreamSplit_mp4.html b/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStreamSplit_mp4.html
new file mode 100644
index 0000000000..00b5f9a832
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStreamSplit_mp4.html
@@ -0,0 +1,60 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seekable attribute after end of stream with split appendBuffer</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ const arrayBuffer = await fetchWithXHR("bipbop/bipbop2s.mp4");
+ info("- append first buffer -");
+ // 25819 is the offset of the first media segment's end
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 25819));
+
+ info("- wait for metadata -");
+ await once(v, "loadedmetadata");
+
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+
+ info("- append second buffer -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 25819));
+ await once(sb, "updateend");
+
+ info("- check seekable -");
+ const target = 1.3;
+ ok(v.seekable.length, "Resource is seekable");
+ is(v.seekable.start(0), 0, "Seekable's start point is correct");
+ is(v.seekable.end(0), ms.duration, "Seekable's end point is correct");
+ ok(v.seekable.length &&
+ target >= v.seekable.start(0) &&
+ target < v.seekable.end(0), "Target is within seekable range");
+
+ info("- call end of stream -");
+ ms.endOfStream();
+ await once(ms, "sourceended");
+
+ info("- check seekable -");
+ ok(v.seekable.length, "Resource is seekable");
+ is(v.seekable.start(0), 0, "Seekable's start point is correct");
+ is(v.seekable.end(0), ms.duration, "Seekable's end point is correct");
+ ok(v.seekable.length &&
+ target >= v.seekable.start(0) &&
+ target < v.seekable.end(0), "Target is within seekable range");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStream_mp4.html b/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStream_mp4.html
new file mode 100644
index 0000000000..c8e53833fb
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekableBeforeAndAfterEndOfStream_mp4.html
@@ -0,0 +1,55 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: seekable attribute after end of stream</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ const arrayBuffer = await fetchWithXHR("bipbop/bipbop2s.mp4");
+ info("- append buffer -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer));
+
+ info("- wait for metadata -");
+ await once(v, "loadedmetadata");
+
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+
+ info("- check seekable -");
+ const target = 1.3;
+ ok(v.seekable.length, "Resource is seekable");
+ is(v.seekable.start(0), 0, "Seekable's start point is correct");
+ is(v.seekable.end(0), ms.duration, "Seekable's end point is correct");
+ ok(v.seekable.length &&
+ target >= v.seekable.start(0) &&
+ target < v.seekable.end(0), "Target is within seekable range");
+
+ info("- call end of stream -");
+ ms.endOfStream();
+ await once(ms, "sourceended");
+
+ info("- check seekable -");
+ ok(v.seekable.length, "Resource is seekable");
+ is(v.seekable.start(0), 0, "Seekable's start point is correct");
+ is(v.seekable.end(0), ms.duration, "Seekable's end point is correct");
+ ok(v.seekable.length &&
+ target >= v.seekable.start(0) &&
+ target < v.seekable.end(0), "Target is within seekable range");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SeekedEvent_mp4.html b/dom/media/mediasource/test/test_SeekedEvent_mp4.html
new file mode 100644
index 0000000000..70401f1eb1
--- /dev/null
+++ b/dom/media/mediasource/test/test_SeekedEvent_mp4.html
@@ -0,0 +1,48 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: Check that seeked event is fired prior loadeddata</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ const events = ["seeked", "loadeddata", "playing"];
+ let eventCount = 0;
+ events.forEach(type => el.addEventListener(type,
+ () => is(events[eventCount++], type, "events must come in order")));
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");
+ let p = once(el, "loadedmetadata");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await p;
+ el.play();
+ videosb.timestampOffset = 2;
+ is(el.readyState, el.HAVE_METADATA, "readyState is HAVE_METADATA");
+ // Load [2, 3.606).
+ p = once(el, "play");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["1"], ".m4s");
+ await p;
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["2"], ".m4s");
+ // TODO: readyState should be at least HAVE_CURRENTDATA, see bug 1367993.
+ ok(el.readyState >= el.HAVE_METADATA, "readyState is HAVE_METADATA");
+ el.currentTime = 2;
+ await Promise.all([once(el, "seeked"), once(el, "playing")]);
+ ok(true, "completed seek");
+ is(eventCount, events.length, "Received expected number of events");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_Sequence_mp4.html b/dom/media/mediasource/test/test_Sequence_mp4.html
new file mode 100644
index 0000000000..5af7fe5a0b
--- /dev/null
+++ b/dom/media/mediasource/test/test_Sequence_mp4.html
@@ -0,0 +1,37 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+  <title>MSE: sequence mode lays out-of-order segments into a continuous buffered range</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ ms.addEventListener("sourceopen", () => ok(false, "No more sourceopen"));
+ const sb = ms.addSourceBuffer("video/mp4");
+ ok(sb, "Create a SourceBuffer");
+ sb.addEventListener("error", e => {
+ ok(false, "Got Error: " + e);
+ SimpleTest.finish();
+ });
+ sb.mode = "sequence";
+
+ await fetchAndLoad(sb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(sb, "bipbop/bipbop_video", ["5"], ".m4s");
+ await fetchAndLoad(sb, "bipbop/bipbop_video", ["2"], ".m4s");
+ is(v.buffered.length, 1, "Continuous buffered range");
+ is(v.buffered.start(0), 0, "Buffered range starts at 0");
+ ok(sb.timestampOffset >= 0, "SourceBuffer.timestampOffset set to allow continuous range");
+ SimpleTest.finish();
+});
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SetModeThrows.html b/dom/media/mediasource/test/test_SetModeThrows.html
new file mode 100644
index 0000000000..c715854b41
--- /dev/null
+++ b/dom/media/mediasource/test/test_SetModeThrows.html
@@ -0,0 +1,34 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>MSE: setting SourceBuffer.mode does not throw</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// MSE supports setting mode now. Make sure it does not throw.
+runWithMSE(function(ms, v) {
+ ms.addEventListener("sourceopen", () => {
+ const sb = ms.addSourceBuffer("video/webm");
+
+ sb.mode = "segments";
+ ok("true", "Setting to segments does not throw");
+ try {
+ sb.mode = "sequence";
+ ok("true", "Setting to sequence does not throw");
+ } catch (e) { ok(false, "Should not throw setting mode to sequence: " + e); }
+
+ SimpleTest.finish();
+ });
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SplitAppend.html b/dom/media/mediasource/test/test_SplitAppend.html
new file mode 100644
index 0000000000..a4be5de282
--- /dev/null
+++ b/dom/media/mediasource/test/test_SplitAppend.html
@@ -0,0 +1,36 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: append initialization and media segment separately</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 318));
+ v.play();
+ await once(sb, "updateend");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 318));
+ await once(sb, "updateend");
+ ms.endOfStream();
+ await once(v, "ended");
+ // XXX: Duration should be exactly 4.0, see bug 1065207.
+ ok(Math.abs(v.duration - 4) <= 0.002, "Video has correct duration");
+ ok(Math.abs(v.currentTime - 4) <= 0.002, "Video has played to end");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SplitAppendDelay.html b/dom/media/mediasource/test/test_SplitAppendDelay.html
new file mode 100644
index 0000000000..40183c3db0
--- /dev/null
+++ b/dom/media/mediasource/test/test_SplitAppendDelay.html
@@ -0,0 +1,38 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: append segments with delay</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+SimpleTest.requestFlakyTimeout("untriaged");
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 318));
+ v.play();
+ await once(sb, "updateend");
+ await wait(1000);
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 318));
+ await once(sb, "updateend");
+ ms.endOfStream();
+ await once(v, "ended");
+ // XXX: Duration should be exactly 4.0, see bug 1065207.
+ ok(Math.abs(v.duration - 4) <= 0.002, "Video has correct duration");
+ ok(Math.abs(v.currentTime - 4) <= 0.002, "Video has played to end");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SplitAppendDelay_mp4.html b/dom/media/mediasource/test/test_SplitAppendDelay_mp4.html
new file mode 100644
index 0000000000..c072a526cf
--- /dev/null
+++ b/dom/media/mediasource/test/test_SplitAppendDelay_mp4.html
@@ -0,0 +1,39 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: append segments with delay</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+SimpleTest.requestFlakyTimeout("untriaged");
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ const arrayBuffer = await fetchWithXHR("bipbop/bipbop2s.mp4");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 1395));
+ v.play();
+ await once(sb, "updateend");
+ await wait(1000);
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 1395));
+ await once(sb, "updateend");
+ ms.endOfStream();
+ await once(v, "ended");
+  // The bipbop video doesn't start at 0. The old MSE code adjusted the
+  // timestamps and ignored the audio track. The new one doesn't.
+ isfuzzy(v.duration, 1.696, 0.166, "Video has correct duration");
+ isfuzzy(v.currentTime, 1.696, 0.166, "Video has played to end");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_SplitAppend_mp4.html b/dom/media/mediasource/test/test_SplitAppend_mp4.html
new file mode 100644
index 0000000000..308fa9837d
--- /dev/null
+++ b/dom/media/mediasource/test/test_SplitAppend_mp4.html
@@ -0,0 +1,38 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: append initialization and media segment separately</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ const arrayBuffer = await fetchWithXHR("bipbop/bipbop2s.mp4");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 0, 1395));
+ v.play();
+ await once(sb, "updateend");
+ sb.appendBuffer(new Uint8Array(arrayBuffer, 1395));
+ await once(sb, "updateend");
+ ms.endOfStream();
+
+ await once(v, "ended");
+  // The bipbop video doesn't start at 0. The old MSE code adjusted the
+  // timestamps and ignored the audio track. The new one doesn't.
+ isfuzzy(v.duration, 1.696, 0.166, "Video has correct duration");
+ isfuzzy(v.currentTime, 1.696, 0.166, "Video has played to end");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_Threshold_mp4.html b/dom/media/mediasource/test/test_Threshold_mp4.html
new file mode 100644
index 0000000000..c46883c93d
--- /dev/null
+++ b/dom/media/mediasource/test/test_Threshold_mp4.html
@@ -0,0 +1,73 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: data gap detection</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ const threshold = 0.5; // gap threshold in seconds.
+ const fuzz = 0.000001; // fuzz when comparing double.
+
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ const vchunks = [{start: 0, end: 3.203333}, { start: 3.203333, end: 6.406666}];
+
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 5), ".m4s");
+ // We will insert a gap of threshold
+ videosb.timestampOffset = threshold;
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(5, 9), ".m4s");
+ // HTMLMediaElement fires 'waiting' if somebody invokes |play()| before the MDSM
+ // has notified it of available data. Make sure that we get 'playing' before
+ // we starting waiting for 'waiting'.
+ info("Invoking play()");
+ let p = once(el, "playing");
+ el.play();
+ await p;
+ await once(el, "waiting");
+ // We're waiting for data after the start of the last frame.
+ // 0.033333 is the duration of the last frame.
+ ok((el.currentTime >= vchunks[1].end - 0.033333 + threshold - fuzz &&
+ el.currentTime <= vchunks[1].end + threshold + fuzz),
+ `skipped the gap properly: ${el.currentTime} ${vchunks[1].end + threshold}`);
+ is(el.buffered.length, 2, "buffered range has right length");
+ // Now we test that seeking will succeed despite the gap.
+ el.currentTime = el.buffered.end(0) + (threshold / 2);
+ await once(el, "seeked");
+ // Now we test that we don't pass the gap.
+ // Clean up our sourcebuffer by removing all data.
+ videosb.timestampOffset = 0;
+ videosb.remove(0, Infinity);
+ el.currentTime = 0;
+ el.pause();
+ await once(videosb, "updateend");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 5), ".m4s");
+ // We will insert a gap of threshold + 1ms
+ videosb.timestampOffset = threshold + 1 / 1000;
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(5, 9), ".m4s");
+ info("Invoking play()");
+ p = once(el, "playing");
+ el.play();
+ await p;
+ await once(el, "waiting");
+ // We're waiting for data after the start of the last frame.
+ // 0.033333 is the duration of the last frame.
+ ok((el.currentTime >= vchunks[0].end - 0.033333 - fuzz &&
+ el.currentTime <= vchunks[0].end + fuzz),
+ `stopped at the gap properly: ${el.currentTime} ${vchunks[0].end}`);
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_TimestampOffset_mp4.html b/dom/media/mediasource/test/test_TimestampOffset_mp4.html
new file mode 100644
index 0000000000..bd08e0f36e
--- /dev/null
+++ b/dom/media/mediasource/test/test_TimestampOffset_mp4.html
@@ -0,0 +1,76 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: basic functionality</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ const eps = 0.01;
+
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ // We divide the video into 3 chunks:
+ // chunk 0: segments 1-4
+ // chunk 1: segments 5-8
+ // chunk 2: segments 9-13
+ // We then fill the timeline so that it seamlessly plays the chunks in order 0, 2, 1.
+ const vchunks = [{start: 0, end: 3.2033},
+ { start: 3.2033, end: 6.4066},
+ { start: 6.4066, end: 10.01}];
+ const firstvoffset = vchunks[2].end - vchunks[2].start; // Duration of chunk 2
+ const secondvoffset = -(vchunks[1].end - vchunks[1].start); // -(Duration of chunk 1)
+
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 5), ".m4s");
+ is(videosb.buffered.length, 1, "No discontinuity");
+ isfuzzy(videosb.buffered.start(0), vchunks[0].start, eps, "Chunk start");
+ isfuzzy(videosb.buffered.end(0), vchunks[0].end, eps, "Chunk end");
+ videosb.timestampOffset = firstvoffset;
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(5, 9), ".m4s");
+ is(videosb.buffered.length, 2, "One discontinuity");
+ isfuzzy(videosb.buffered.start(0), vchunks[0].start, eps, "First Chunk start");
+ isfuzzy(videosb.buffered.end(0), vchunks[0].end, eps, "First chunk end");
+ isfuzzy(videosb.buffered.start(1), vchunks[1].start + firstvoffset, eps, "Second chunk start");
+ isfuzzy(videosb.buffered.end(1), vchunks[1].end + firstvoffset, eps, "Second chunk end");
+ videosb.timestampOffset = secondvoffset;
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(9, 14), ".m4s");
+ is(videosb.buffered.length, 1, "No discontinuity (end)");
+ isfuzzy(videosb.buffered.start(0), vchunks[0].start, eps, "Chunk start");
+ isfuzzy(videosb.buffered.end(0), vchunks[2].end, eps, "Chunk end");
+ audiosb.timestampOffset = 3;
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(1, 12), ".m4s");
+ is(audiosb.buffered.length, 1, "No audio discontinuity");
+ isfuzzy(audiosb.buffered.start(0), 3, eps, "Audio starts at 3");
+
+ // Trim the rest of the audio.
+ audiosb.remove(videosb.buffered.end(0), Infinity);
+ videosb.remove(videosb.buffered.end(0), Infinity);
+ if (audiosb.updating) {
+ await once(audiosb, "updateend");
+ }
+ if (videosb.updating) {
+ await once(videosb, "updateend");
+ }
+ info("waiting for play to complete");
+ el.play();
+ el.currentTime = el.buffered.start(0);
+ ms.endOfStream();
+ await Promise.all([once(el, "ended"), once(el, "seeked")]);
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_TruncatedDuration.html b/dom/media/mediasource/test/test_TruncatedDuration.html
new file mode 100644
index 0000000000..c80e40ac98
--- /dev/null
+++ b/dom/media/mediasource/test/test_TruncatedDuration.html
@@ -0,0 +1,55 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: truncating the media seeks to end of media and update buffered range</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+// This test appends data to a mediasource and then seeks to half the duration
+// of the video.
+// We then shorten the video to 1/3rd of its original size by modifying the
+// mediasource.duration attribute.
+// We ensure that the buffered range immediately reflects the truncation
+// and that we've seeked to the new end of the media as per W3C spec and
+// video.currentTime got updated.
+
+SimpleTest.waitForExplicitFinish();
+
+const round = n => Math.round(n * 1000) / 1000;
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("seek.webm")));
+ await once(sb, "updateend");
+ v.currentTime = v.duration / 2;
+ is(v.currentTime, v.duration / 2, "current time was updated");
+ ok(v.seeking, "seeking is true");
+ await once(v, "seeked");
+ const duration = round(v.duration / 3);
+ is(sb.updating, false, "sourcebuffer isn't updating");
+ sb.remove(duration, Infinity);
+ await once(sb, "updateend");
+ ms.duration = duration;
+ // frames aren't truncated, so duration may be slightly more.
+ isfuzzy(v.duration, duration, 1 / 30, "element duration was updated");
+ sb.abort(); // this shouldn't abort updating the duration (bug 1130826).
+ ok(v.seeking, "seeking is true");
+ // test playback position was updated (bug 1130839).
+ is(v.currentTime, v.duration, "current time was updated");
+ is(sb.buffered.length, 1, "One buffered range");
+ // Truncated mediasource duration will cause the video element to seek.
+ await once(v, "seeking");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_TruncatedDuration_mp4.html b/dom/media/mediasource/test/test_TruncatedDuration_mp4.html
new file mode 100644
index 0000000000..2f37150fd3
--- /dev/null
+++ b/dom/media/mediasource/test/test_TruncatedDuration_mp4.html
@@ -0,0 +1,59 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: truncating the media seeks to end of media and update buffered range</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+// This test appends data to a mediasource and then seeks to half the duration
+// of the video.
+// We then shorten the video to 1/3rd of its original size.
+// We ensure that the buffered range immediately reflects the truncation
+// and that we've seeked to the new end of the media as per W3C spec and
+// video.currentTime got updated.
+
+SimpleTest.waitForExplicitFinish();
+
+const round = n => Math.round(n * 1000) / 1000;
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/mp4");
+
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("bipbop/bipbop2s.mp4")));
+ await once(sb, "updateend");
+ // mp4 metadata states 10s when we only have 1.6s worth of video.
+ sb.remove(sb.buffered.end(0), Infinity);
+ await once(sb, "updateend");
+ ms.duration = sb.buffered.end(0);
+ is(v.duration, ms.duration, "current time updated with mediasource duration");
+ v.currentTime = v.duration / 2;
+ is(v.currentTime, v.duration / 2, "current time was updated");
+ ok(v.seeking, "seeking is true");
+ await once(v, "seeked");
+ const duration = round(v.duration / 3);
+ is(sb.updating, false, "sourcebuffer isn't updating");
+ sb.remove(duration, Infinity);
+ await once(sb, "updateend");
+ ms.duration = duration;
+ // frames aren't truncated, so duration may be slightly more.
+ isfuzzy(v.duration, duration, 1 / 30, "element duration was updated");
+ sb.abort(); // this shouldn't abort updating the duration (bug 1130826).
+ ok(v.seeking, "seeking is true");
+ // test playback position was updated (bug 1130839).
+ is(v.currentTime, v.duration, "current time was updated");
+ is(sb.buffered.length, 1, "One buffered range");
+ // Truncated mediasource duration will cause the video element to seek.
+ await once(v, "seeking");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_WMFUnmatchedAudioDataTime.html b/dom/media/mediasource/test/test_WMFUnmatchedAudioDataTime.html
new file mode 100644
index 0000000000..7c03214c7b
--- /dev/null
+++ b/dom/media/mediasource/test/test_WMFUnmatchedAudioDataTime.html
@@ -0,0 +1,32 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: audio output time doesn't match the input time on WMF</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer('audio/mp4;codecs=" mp4a.40.2"');
+ sb.appendBuffer(new Uint8Array(await fetchWithXHR("wmf_mismatchedaudiotime.mp4")));
+ ok(true, "appended data");
+
+ info(`if error doesn't occur, we should be able to receive 'seeked', otherwise 'error' would be dispatched`);
+ v.currentTime = 22.05;
+ ok(await Promise.race([
+ once(v, "seeked").then(_ => true),
+ once(v, "error").then(_ => false),
+ ]), "finished seeking without any error");
+ ok(!v.error, "should not get any error");
+ SimpleTest.finish();
+});
+
+</script>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_WaitingOnMissingData.html b/dom/media/mediasource/test/test_WaitingOnMissingData.html
new file mode 100644
index 0000000000..b1ad41bb37
--- /dev/null
+++ b/dom/media/mediasource/test/test_WaitingOnMissingData.html
@@ -0,0 +1,60 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: |waiting| event when source data is missing</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const sb = ms.addSourceBuffer("video/webm");
+ sb.addEventListener("error", e => {
+ ok(false, "Got Error: " + e);
+ SimpleTest.finish();
+ });
+ const arrayBuffer = await fetchWithXHR("seek.webm");
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 0, 318));
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 318, 25223 - 318));
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 25223, 46712 - 25223));
+ /* Note - Missing |46712, 67833 - 46712| segment here */
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 67833, 88966 - 67833));
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 88966));
+ // HTMLMediaElement fires "waiting" if somebody invokes |play()| before the MDSM
+ // has notified it of available data. Make sure that we get "playing" before
+  // we start waiting for "waiting".
+ info("Invoking play()");
+ let p = once(el, "playing");
+ el.play();
+ await p;
+ ok(true, "Video playing. It should play for a bit, then fire 'waiting'");
+ p = once(el, "waiting");
+ el.play();
+ await p;
+ // currentTime is based on the current video frame, so if the audio ends just before
+ // the next video frame, currentTime can be up to 1 frame's worth earlier than
+ // min(audioEnd, videoEnd).
+ // 0.0465 is the length of the last audio frame.
+ ok(el.currentTime >= (sb.buffered.end(0) - 0.0465),
+ `Got a waiting event at ${el.currentTime}`);
+ info("Loading more data");
+ p = once(el, "ended");
+ await loadSegment(sb, new Uint8Array(arrayBuffer, 46712, 67833 - 46712));
+ ms.endOfStream();
+ await p;
+ // These fuzz factors are bigger than they should be. We should investigate
+ // and fix them in bug 1137574.
+ isfuzzy(el.duration, 4.001, 0.1, "Video has correct duration: " + el.duration);
+ isfuzzy(el.currentTime, el.duration, 0.1, "Video has correct currentTime.");
+ SimpleTest.finish();
+});
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_WaitingOnMissingDataEnded_mp4.html b/dom/media/mediasource/test/test_WaitingOnMissingDataEnded_mp4.html
new file mode 100644
index 0000000000..8ca61eae7e
--- /dev/null
+++ b/dom/media/mediasource/test/test_WaitingOnMissingDataEnded_mp4.html
@@ -0,0 +1,47 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: |waiting| event when source data is missing</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ el.addEventListener("ended", () => {
+ ok(false, "ended should never fire");
+ SimpleTest.finish();
+ });
+ const videosb = ms.addSourceBuffer("video/mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 5), ".m4s");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(6, 8), ".m4s");
+ is(el.buffered.length, 2, "discontinuous buffered range");
+ ms.endOfStream();
+ await Promise.all([once(el, "durationchange"), once(ms, "sourceended")]);
+ // HTMLMediaElement fires "waiting" if somebody invokes |play()| before the MDSM
+ // has notified it of available data. Make sure that we get "playing" before
+  // we start waiting for "waiting".
+ info("Invoking play()");
+ el.play();
+ await once(el, "playing");
+ ok(true, "Video playing. It should play for a bit, then fire 'waiting'");
+ await once(el, "waiting");
+ // waiting is fired when we start to play the last frame.
+ // 0.033334 is the duration of the last frame, + 0.000001 of fuzz.
+  // currentTime can be up to 1 frame's worth earlier than the end of video.
+ isfuzzy(el.currentTime, videosb.buffered.end(0), 0.033334, "waiting was fired on gap");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_WaitingOnMissingData_mp4.html b/dom/media/mediasource/test/test_WaitingOnMissingData_mp4.html
new file mode 100644
index 0000000000..f6768754e7
--- /dev/null
+++ b/dom/media/mediasource/test/test_WaitingOnMissingData_mp4.html
@@ -0,0 +1,61 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: |waiting| event when source data is missing</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(1, 5), ".m4s");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(6, 12), ".m4s");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 6), ".m4s");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(7, 14), ".m4s");
+ // HTMLMediaElement fires "waiting" if somebody invokes |play()| before the MDSM
+ // has notified it of available data. Make sure that we get "playing" before
+  // we start waiting for "waiting".
+ info("Invoking play()");
+ let p = once(el, "playing");
+ el.play();
+ await p;
+ ok(true, "Video playing. It should play for a bit, then fire 'waiting'");
+ p = once(el, "waiting");
+ el.play();
+ await p;
+ // currentTime is based on the current video frame, so if the audio ends just before
+ // the next video frame, currentTime can be up to 1 frame's worth earlier than
+ // min(audioEnd, videoEnd).
+ // 0.0465 is the length of the last audio frame.
+ ok(el.currentTime >= (Math.min(audiosb.buffered.end(0), videosb.buffered.end(0)) - 0.0465),
+ `Got a waiting event at ${el.currentTime}`);
+ info("Loading more data");
+ p = once(el, "ended");
+ await Promise.all([
+ fetchAndLoad(audiosb, "bipbop/bipbop_audio", [5], ".m4s"),
+ fetchAndLoad(videosb, "bipbop/bipbop_video", [6], ".m4s"),
+ ]);
+ ms.endOfStream();
+ await p;
+ // These fuzz factors are bigger than they should be. We should investigate
+ // and fix them in bug 1137574.
+ isfuzzy(el.duration, 10.1, 0.1, "Video has correct duration: " + el.duration);
+ isfuzzy(el.currentTime, el.duration, 0.1, "Video has correct currentTime.");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_WaitingToEndedTransition_mp4.html b/dom/media/mediasource/test/test_WaitingToEndedTransition_mp4.html
new file mode 100644
index 0000000000..9c3fc73161
--- /dev/null
+++ b/dom/media/mediasource/test/test_WaitingToEndedTransition_mp4.html
@@ -0,0 +1,52 @@
+<!DOCTYPE html>
+<html><head>
+<meta http-equiv="content-type" content="text/html; charset=windows-1252">
+ <title>MSE: |waiting| event when source data is missing</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test"><script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ const videosb = ms.addSourceBuffer("video/mp4");
+ // ensure tracks end at approximately the same time to ensure ended event is
+ // always fired (bug 1233639).
+ audiosb.appendWindowEnd = 3.9;
+ videosb.appendWindowEnd = 3.9;
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", ["init"], ".mp4");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", ["init"], ".mp4");
+ await fetchAndLoad(audiosb, "bipbop/bipbop_audio", range(1, 5), ".m4s");
+ await fetchAndLoad(videosb, "bipbop/bipbop_video", range(1, 6), ".m4s");
+ // HTMLMediaElement fires "waiting" if somebody invokes |play()| before the MDSM
+ // has notified it of available data. Make sure that we get "playing" before
+  // we start waiting for "waiting".
+ info("Invoking play()");
+ let p = once(el, "playing");
+ el.play();
+ await p;
+ ok(true, "Video playing. It should play for a bit, then fire 'waiting'");
+ await once(el, "waiting");
+ p = once(el, "ended");
+ ms.endOfStream();
+ await p;
+ // Following bug 1524890, we now implement fully step 8 of the coded frame
+ // processing algorithm
+ // http://w3c.github.io/media-source/index.html#sourcebuffer-coded-frame-processing
+ // As such, duration is exactly the value of videosb.appendWindowEnd
+ is(el.duration, videosb.appendWindowEnd, "Video has correct duration: " + el.duration);
+ is(el.currentTime, el.duration, "Video has correct currentTime.");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_WebMTagsBeforeCluster.html b/dom/media/mediasource/test/test_WebMTagsBeforeCluster.html
new file mode 100644
index 0000000000..d1d45173cd
--- /dev/null
+++ b/dom/media/mediasource/test/test_WebMTagsBeforeCluster.html
@@ -0,0 +1,47 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: WebM tags element before cluster element</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+addMSEPrefs(["media.mediasource.webm.enabled", true]);
+
+runWithMSE(async (ms, v) => {
+ await once(ms, "sourceopen");
+ const sb = ms.addSourceBuffer("video/webm");
+
+ const arrayBuffer = await fetchWithXHR("tags_before_cluster.webm");
+ info("- append buffer -");
+ sb.appendBuffer(new Uint8Array(arrayBuffer));
+
+ info("- wait for metadata -");
+ await once(v, "loadedmetadata");
+
+ info("- wait for updateend -");
+ await once(sb, "updateend");
+
+ info("- call end of stream -");
+ ms.endOfStream();
+ await once(ms, "sourceended");
+
+ info("- check buffered range -");
+ is(sb.buffered.length, 1, "buffered range is not empty.");
+
+ info("- video is playing -");
+ v.play();
+ await once(v, "timeupdate");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/test_trackidchange_mp4.html b/dom/media/mediasource/test/test_trackidchange_mp4.html
new file mode 100644
index 0000000000..fdbeece3cd
--- /dev/null
+++ b/dom/media/mediasource/test/test_trackidchange_mp4.html
@@ -0,0 +1,32 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>MSE: test append of audio with similar init segments that have different track ids</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="mediasource.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+runWithMSE(async (ms, el) => {
+ el.controls = true;
+ await once(ms, "sourceopen");
+ ok(true, "Receive a sourceopen event");
+ const audiosb = ms.addSourceBuffer("audio/mp4");
+ await fetchAndLoad(audiosb, "init-trackid2", [''], ".mp4");
+ await fetchAndLoad(audiosb, "segment-2.0001", [''], ".m4s");
+ await fetchAndLoad(audiosb, "init-trackid3", [''], ".mp4");
+ await fetchAndLoad(audiosb, "segment-3.0002", [''], ".m4s");
+ is(el.buffered.length, 1, "data is buffered");
+ is(el.buffered.end(0), 8, "all data got appended");
+ SimpleTest.finish();
+});
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/dom/media/mediasource/test/whitenoise-he-aac-5s.mp4 b/dom/media/mediasource/test/whitenoise-he-aac-5s.mp4
new file mode 100644
index 0000000000..db648b8229
--- /dev/null
+++ b/dom/media/mediasource/test/whitenoise-he-aac-5s.mp4
Binary files differ
diff --git a/dom/media/mediasource/test/wmf_mismatchedaudiotime.mp4 b/dom/media/mediasource/test/wmf_mismatchedaudiotime.mp4
new file mode 100644
index 0000000000..9e179bd326
--- /dev/null
+++ b/dom/media/mediasource/test/wmf_mismatchedaudiotime.mp4
Binary files differ