diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-05-15 03:35:49 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-05-15 03:35:49 +0000 |
commit | d8bbc7858622b6d9c278469aab701ca0b609cddf (patch) | |
tree | eff41dc61d9f714852212739e6b3738b82a2af87 /third_party/libwebrtc/video | |
parent | Releasing progress-linux version 125.0.3-1~progress7.99u1. (diff) | |
download | firefox-d8bbc7858622b6d9c278469aab701ca0b609cddf.tar.xz firefox-d8bbc7858622b6d9c278469aab701ca0b609cddf.zip |
Merging upstream version 126.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/video')
25 files changed, 1592 insertions, 968 deletions
diff --git a/third_party/libwebrtc/video/BUILD.gn b/third_party/libwebrtc/video/BUILD.gn index 0a930053c0..2d6d8ab10c 100644 --- a/third_party/libwebrtc/video/BUILD.gn +++ b/third_party/libwebrtc/video/BUILD.gn @@ -65,8 +65,6 @@ rtc_library("video") { "video_quality_observer2.h", "video_receive_stream2.cc", "video_receive_stream2.h", - "video_send_stream.cc", - "video_send_stream.h", "video_send_stream_impl.cc", "video_send_stream_impl.h", "video_stream_decoder2.cc", @@ -82,18 +80,22 @@ rtc_library("video") { ":video_stream_encoder_impl", ":video_stream_encoder_interface", "../api:array_view", + "../api:bitrate_allocation", "../api:fec_controller_api", "../api:field_trials_view", "../api:frame_transformer_interface", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api:transport_api", + "../api/adaptation:resource_adaptation_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:options", + "../api/environment", + "../api/metronome", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", - "../api/transport:field_trial_based_config", "../api/units:data_rate", "../api/units:frequency", "../api/units:time_delta", @@ -104,6 +106,8 @@ rtc_library("video") { "../api/video:video_bitrate_allocator", "../api/video:video_codec_constants", "../api/video:video_frame", + "../api/video:video_frame_type", + "../api/video:video_layers_allocation", "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", "../api/video_codecs:video_codecs_api", @@ -141,7 +145,6 @@ rtc_library("video") { "../rtc_base:rate_tracker", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", "../rtc_base:sample_counter", "../rtc_base:stringutils", @@ -230,6 +233,7 @@ rtc_library("frame_cadence_adapter") { deps = [ "../api:field_trials_view", "../api:sequence_checker", + "../api/metronome", "../api/task_queue", 
"../api/task_queue:pending_task_safety_flag", "../api/units:time_delta", @@ -253,6 +257,7 @@ rtc_library("frame_cadence_adapter") { absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/cleanup:cleanup", ] } @@ -444,7 +449,6 @@ rtc_library("video_stream_encoder_impl") { "../rtc_base:refcount", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", "../rtc_base:stringutils", "../rtc_base:timeutils", @@ -816,6 +820,8 @@ if (rtc_include_tests) { ":video_stream_buffer_controller", ":video_stream_encoder_impl", ":video_stream_encoder_interface", + "../api:array_view", + "../api:bitrate_allocation", "../api:create_frame_generator", "../api:fake_frame_decryptor", "../api:fake_frame_encryptor", @@ -835,6 +841,7 @@ if (rtc_include_tests) { "../api:time_controller", "../api:transport_api", "../api/adaptation:resource_adaptation_api", + "../api/adaptation:resource_adaptation_api", "../api/crypto:options", "../api/environment", "../api/environment:environment_factory", @@ -856,11 +863,13 @@ if (rtc_include_tests) { "../api/video:video_bitrate_allocation", "../api/video:video_frame", "../api/video:video_frame_type", + "../api/video:video_layers_allocation", "../api/video:video_rtp_headers", "../api/video/test:video_frame_matchers", "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../api/video_codecs:vp8_temporal_layers_factory", + "../call:bitrate_allocator", "../call:call_interfaces", "../call:fake_network", "../call:mock_bitrate_allocator", @@ -917,7 +926,6 @@ if (rtc_include_tests) { "../rtc_base:rtc_base_tests_utils", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", "../rtc_base:stringutils", "../rtc_base:task_queue_for_test", diff --git a/third_party/libwebrtc/video/config/simulcast.cc 
b/third_party/libwebrtc/video/config/simulcast.cc index 2bd4ac04c3..7a78ef8d05 100644 --- a/third_party/libwebrtc/video/config/simulcast.cc +++ b/third_party/libwebrtc/video/config/simulcast.cc @@ -350,10 +350,9 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers( bool base_heavy_tl3_rate_alloc, const webrtc::FieldTrialsView& trials) { std::vector<webrtc::VideoStream> layers(layer_count); - const bool enable_lowres_bitrate_interpolation = EnableLowresBitrateInterpolation(trials); - + const int num_temporal_layers = DefaultNumberOfTemporalLayers(trials); // Format width and height has to be divisible by |2 ^ num_simulcast_layers - // 1|. width = NormalizeSimulcastSize(width, layer_count); @@ -366,7 +365,7 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers( // TODO(pbos): Fill actual temporal-layer bitrate thresholds. layers[s].max_qp = max_qp; layers[s].num_temporal_layers = - temporal_layers_supported ? DefaultNumberOfTemporalLayers(trials) : 1; + temporal_layers_supported ? 
num_temporal_layers : 1; layers[s].max_bitrate_bps = FindSimulcastMaxBitrate(width, height, enable_lowres_bitrate_interpolation) @@ -375,7 +374,6 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers( FindSimulcastTargetBitrate(width, height, enable_lowres_bitrate_interpolation) .bps(); - int num_temporal_layers = DefaultNumberOfTemporalLayers(trials); if (s == 0) { // If alternative temporal rate allocation is selected, adjust the // bitrate of the lowest simulcast stream so that absolute bitrate for diff --git a/third_party/libwebrtc/video/frame_cadence_adapter.cc b/third_party/libwebrtc/video/frame_cadence_adapter.cc index 2c4acdd6c2..4aea1acec6 100644 --- a/third_party/libwebrtc/video/frame_cadence_adapter.cc +++ b/third_party/libwebrtc/video/frame_cadence_adapter.cc @@ -19,6 +19,7 @@ #include "absl/algorithm/container.h" #include "absl/base/attributes.h" +#include "absl/cleanup/cleanup.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" @@ -102,7 +103,9 @@ class ZeroHertzAdapterMode : public AdapterMode { ZeroHertzAdapterMode(TaskQueueBase* queue, Clock* clock, FrameCadenceAdapterInterface::Callback* callback, - double max_fps); + double max_fps, + std::atomic<int>& frames_scheduled_for_processing, + bool zero_hertz_queue_overload); ~ZeroHertzAdapterMode() { refresh_frame_requester_.Stop(); } // Reconfigures according to parameters. @@ -189,12 +192,20 @@ class ZeroHertzAdapterMode : public AdapterMode { // have arrived. void ProcessRepeatedFrameOnDelayedCadence(int frame_id) RTC_RUN_ON(sequence_checker_); - // Sends a frame, updating the timestamp to the current time. - void SendFrameNow(Timestamp post_time, const VideoFrame& frame) const - RTC_RUN_ON(sequence_checker_); + // Sends a frame, updating the timestamp to the current time. Also updates + // `queue_overload_count_` based on the time it takes to encode a frame and + // the amount of received frames while encoding. 
The `queue_overload` + // parameter in the OnFrame callback will be true while + // `queue_overload_count_` is larger than zero to allow the client to drop + // frames and thereby mitigate delay buildups. + // Repeated frames are sent with `post_time` set to absl::nullopt. + void SendFrameNow(absl::optional<Timestamp> post_time, + const VideoFrame& frame) RTC_RUN_ON(sequence_checker_); // Returns the repeat duration depending on if it's an idle repeat or not. TimeDelta RepeatDuration(bool idle_repeat) const RTC_RUN_ON(sequence_checker_); + // Returns the frame duration taking potential restrictions into account. + TimeDelta FrameDuration() const RTC_RUN_ON(sequence_checker_); // Unless timer already running, starts repeatedly requesting refresh frames // after a grace_period. If a frame appears before the grace_period has // passed, the request is cancelled. @@ -207,6 +218,14 @@ class ZeroHertzAdapterMode : public AdapterMode { // The configured max_fps. // TODO(crbug.com/1255737): support max_fps updates. const double max_fps_; + + // Number of frames that are currently scheduled for processing on the + // `queue_`. + const std::atomic<int>& frames_scheduled_for_processing_; + + // Can be used as kill-switch for the queue overload mechanism. + const bool zero_hertz_queue_overload_enabled_; + // How much the incoming frame sequence is delayed by. const TimeDelta frame_delay_ = TimeDelta::Seconds(1) / max_fps_; @@ -230,14 +249,88 @@ class ZeroHertzAdapterMode : public AdapterMode { // the max frame rate. absl::optional<TimeDelta> restricted_frame_delay_ RTC_GUARDED_BY(sequence_checker_); + // Set in OnSendFrame to reflect how many future frames will be forwarded with + // the `queue_overload` flag set to true. + int queue_overload_count_ RTC_GUARDED_BY(sequence_checker_) = 0; ScopedTaskSafety safety_; }; +// Implements a frame cadence adapter supporting VSync aligned encoding. 
+class VSyncEncodeAdapterMode : public AdapterMode { + public: + VSyncEncodeAdapterMode( + Clock* clock, + TaskQueueBase* queue, + rtc::scoped_refptr<PendingTaskSafetyFlag> queue_safety_flag, + Metronome* metronome, + TaskQueueBase* worker_queue, + FrameCadenceAdapterInterface::Callback* callback) + : clock_(clock), + queue_(queue), + queue_safety_flag_(queue_safety_flag), + callback_(callback), + metronome_(metronome), + worker_queue_(worker_queue) { + queue_sequence_checker_.Detach(); + worker_sequence_checker_.Detach(); + } + + // Adapter overrides. + void OnFrame(Timestamp post_time, + bool queue_overload, + const VideoFrame& frame) override; + + absl::optional<uint32_t> GetInputFrameRateFps() override { + RTC_DCHECK_RUN_ON(&queue_sequence_checker_); + return input_framerate_.Rate(clock_->TimeInMilliseconds()); + } + + void UpdateFrameRate() override { + RTC_DCHECK_RUN_ON(&queue_sequence_checker_); + input_framerate_.Update(1, clock_->TimeInMilliseconds()); + } + + void EncodeAllEnqueuedFrames(); + + private: + // Holds input frames coming from the client ready to be encoded. + struct InputFrameRef { + InputFrameRef(const VideoFrame& video_frame, Timestamp time_when_posted_us) + : time_when_posted_us(time_when_posted_us), + video_frame(std::move(video_frame)) {} + Timestamp time_when_posted_us; + const VideoFrame video_frame; + }; + + Clock* const clock_; + TaskQueueBase* queue_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker queue_sequence_checker_; + rtc::scoped_refptr<PendingTaskSafetyFlag> queue_safety_flag_; + // Input frame rate statistics for use when not in zero-hertz mode. 
+ RateStatistics input_framerate_ RTC_GUARDED_BY(queue_sequence_checker_){ + FrameCadenceAdapterInterface::kFrameRateAveragingWindowSizeMs, 1000}; + FrameCadenceAdapterInterface::Callback* const callback_; + + Metronome* metronome_; + TaskQueueBase* const worker_queue_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; + // `worker_safety_` protects tasks on the worker queue related to `metronome_` + // since metronome usage must happen on worker thread. + ScopedTaskSafetyDetached worker_safety_; + Timestamp expected_next_tick_ RTC_GUARDED_BY(worker_sequence_checker_) = + Timestamp::PlusInfinity(); + // Vector of input frames to be encoded. + std::vector<InputFrameRef> input_queue_ + RTC_GUARDED_BY(worker_sequence_checker_); +}; + class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { public: FrameCadenceAdapterImpl(Clock* clock, TaskQueueBase* queue, + Metronome* metronome, + TaskQueueBase* worker_queue, const FieldTrialsView& field_trials); ~FrameCadenceAdapterImpl(); @@ -273,6 +366,10 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { // - zero-hertz mode enabled bool IsZeroHertzScreenshareEnabled() const RTC_RUN_ON(queue_); + // Configures current adapter on non-ZeroHertz mode, called when Initialize or + // MaybeReconfigureAdapters. + void ConfigureCurrentAdapterWithoutZeroHertz(); + // Handles adapter creation on configuration changes. void MaybeReconfigureAdapters(bool was_zero_hertz_enabled) RTC_RUN_ON(queue_); @@ -283,15 +380,24 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { // 0 Hz. const bool zero_hertz_screenshare_enabled_; - // The two possible modes we're under. + // Kill-switch for the queue overload mechanism in zero-hertz mode. + const bool frame_cadence_adapter_zero_hertz_queue_overload_enabled_; + + // The three possible modes we're under. 
absl::optional<PassthroughAdapterMode> passthrough_adapter_; absl::optional<ZeroHertzAdapterMode> zero_hertz_adapter_; + // The `vsync_encode_adapter_` must be destroyed on the worker queue since + // VSync metronome needs to happen on worker thread. + std::unique_ptr<VSyncEncodeAdapterMode> vsync_encode_adapter_; // If set, zero-hertz mode has been enabled. absl::optional<ZeroHertzModeParams> zero_hertz_params_; - std::atomic<bool> zero_hertz_adapter_is_active_{false}; // Cache for the current adapter mode. AdapterMode* current_adapter_mode_ = nullptr; + // VSync encoding is used when this is valid. + Metronome* const metronome_; + TaskQueueBase* const worker_queue_; + // Timestamp for statistics reporting. absl::optional<Timestamp> zero_hertz_adapter_created_timestamp_ RTC_GUARDED_BY(queue_); @@ -323,8 +429,15 @@ ZeroHertzAdapterMode::ZeroHertzAdapterMode( TaskQueueBase* queue, Clock* clock, FrameCadenceAdapterInterface::Callback* callback, - double max_fps) - : queue_(queue), clock_(clock), callback_(callback), max_fps_(max_fps) { + double max_fps, + std::atomic<int>& frames_scheduled_for_processing, + bool zero_hertz_queue_overload_enabled) + : queue_(queue), + clock_(clock), + callback_(callback), + max_fps_(max_fps), + frames_scheduled_for_processing_(frames_scheduled_for_processing), + zero_hertz_queue_overload_enabled_(zero_hertz_queue_overload_enabled) { sequence_checker_.Detach(); MaybeStartRefreshFrameRequester(); } @@ -391,22 +504,13 @@ void ZeroHertzAdapterMode::OnFrame(Timestamp post_time, // Store the frame in the queue and schedule deferred processing. 
queued_frames_.push_back(frame); - int frame_id = current_frame_id_; current_frame_id_++; scheduled_repeat_ = absl::nullopt; TimeDelta time_spent_since_post = clock_->CurrentTime() - post_time; - TRACE_EVENT_ASYNC_BEGIN0(TRACE_DISABLED_BY_DEFAULT("webrtc"), "QueueToEncode", - frame_id); queue_->PostDelayedHighPrecisionTask( SafeTask(safety_.flag(), - [this, post_time, frame_id, frame] { - RTC_UNUSED(frame_id); + [this, post_time] { RTC_DCHECK_RUN_ON(&sequence_checker_); - TRACE_EVENT_ASYNC_END0(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "QueueToEncode", frame_id); - TRACE_EVENT_ASYNC_END0(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "OnFrameToEncode", - frame.video_frame_buffer().get()); ProcessOnDelayedCadence(post_time); }), std::max(frame_delay_ - time_spent_since_post, TimeDelta::Zero())); @@ -582,36 +686,70 @@ void ZeroHertzAdapterMode::ProcessRepeatedFrameOnDelayedCadence(int frame_id) { // Schedule another repeat before sending the frame off which could take time. ScheduleRepeat(frame_id, HasQualityConverged()); - // Mark `post_time` with 0 to signal that this is a repeated frame. - SendFrameNow(Timestamp::Zero(), frame); + SendFrameNow(absl::nullopt, frame); } -void ZeroHertzAdapterMode::SendFrameNow(Timestamp post_time, - const VideoFrame& frame) const { +void ZeroHertzAdapterMode::SendFrameNow(absl::optional<Timestamp> post_time, + const VideoFrame& frame) { RTC_DCHECK_RUN_ON(&sequence_checker_); TRACE_EVENT0("webrtc", __func__); - Timestamp now = clock_->CurrentTime(); - // Exclude repeated frames which are marked with zero as post time. - if (post_time != Timestamp::Zero()) { - TimeDelta delay = (now - post_time); + + Timestamp encode_start_time = clock_->CurrentTime(); + if (post_time.has_value()) { + TimeDelta delay = (encode_start_time - *post_time); RTC_HISTOGRAM_COUNTS_10000("WebRTC.Screenshare.ZeroHz.DelayMs", delay.ms()); } - // TODO(crbug.com/1255737): ensure queue_overload is computed from current - // conditions on the encoder queue. 
- callback_->OnFrame(/*post_time=*/now, - /*queue_overload=*/false, frame); + + // Forward the frame and set `queue_overload` if it has been detected that it + // is not possible to deliver frames at the expected rate due to slow + // encoding. + callback_->OnFrame(/*post_time=*/encode_start_time, queue_overload_count_ > 0, + frame); + + // WebRTC-ZeroHertzQueueOverload kill-switch. + if (!zero_hertz_queue_overload_enabled_) + return; + + // `queue_overload_count_` determines for how many future frames the + // `queue_overload` flag will be set and it is only increased if: + // o We are not already in an overload state. + // o New frames have been scheduled for processing on the queue while encoding + // took place in OnFrame. + // o The duration of OnFrame is longer than the current frame duration. + // If all these conditions are fulfilled, `queue_overload_count_` is set to + // `frames_scheduled_for_processing_` and any pending repeat is canceled since + // new frames are available and the repeat is not needed. + // If the adapter is already in an overload state, simply decrease + // `queue_overload_count_` by one. + if (queue_overload_count_ == 0) { + const int frames_scheduled_for_processing = + frames_scheduled_for_processing_.load(std::memory_order_relaxed); + if (frames_scheduled_for_processing > 0) { + TimeDelta encode_time = clock_->CurrentTime() - encode_start_time; + if (encode_time > FrameDuration()) { + queue_overload_count_ = frames_scheduled_for_processing; + // Invalidates any outstanding repeat to avoid sending pending repeat + // directly after too long encode. 
+ current_frame_id_++; + } + } + } else { + queue_overload_count_--; + } + RTC_HISTOGRAM_BOOLEAN("WebRTC.Screenshare.ZeroHz.QueueOverload", + queue_overload_count_ > 0); +} + +TimeDelta ZeroHertzAdapterMode::FrameDuration() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return std::max(frame_delay_, restricted_frame_delay_.value_or(frame_delay_)); } TimeDelta ZeroHertzAdapterMode::RepeatDuration(bool idle_repeat) const { RTC_DCHECK_RUN_ON(&sequence_checker_); - // By default use `frame_delay_` in non-idle repeat mode but use the - // restricted frame delay instead if it is set in - // UpdateVideoSourceRestrictions. - TimeDelta frame_delay = - std::max(frame_delay_, restricted_frame_delay_.value_or(frame_delay_)); return idle_repeat ? FrameCadenceAdapterInterface::kZeroHertzIdleRepeatRatePeriod - : frame_delay; + : FrameDuration(); } void ZeroHertzAdapterMode::MaybeStartRefreshFrameRequester() { @@ -630,23 +768,100 @@ void ZeroHertzAdapterMode::MaybeStartRefreshFrameRequester() { } } +void VSyncEncodeAdapterMode::OnFrame(Timestamp post_time, + bool queue_overload, + const VideoFrame& frame) { + // We expect `metronome_` and `EncodeAllEnqueuedFrames()` run on + // `worker_queue_`. + if (!worker_queue_->IsCurrent()) { + worker_queue_->PostTask(SafeTask( + worker_safety_.flag(), [this, post_time, queue_overload, frame] { + OnFrame(post_time, queue_overload, frame); + })); + return; + } + + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + TRACE_EVENT0("webrtc", "VSyncEncodeAdapterMode::OnFrame"); + + input_queue_.emplace_back(std::move(frame), post_time); + + // The `metronome_` tick period may be throttled in some cases, so here we only + // align encode task to VSync event when `metronome_` tick period is less + // than 34ms (30Hz). + static constexpr TimeDelta kMaxAllowedDelay = TimeDelta::Millis(34); + if (metronome_->TickPeriod() <= kMaxAllowedDelay) { + // The metronome is ticking frequently enough that it is worth the extra + // delay. 
+ metronome_->RequestCallOnNextTick( + SafeTask(worker_safety_.flag(), [this] { EncodeAllEnqueuedFrames(); })); + } else { + // The metronome is ticking too infrequently, encode immediately. + EncodeAllEnqueuedFrames(); + } +} + +void VSyncEncodeAdapterMode::EncodeAllEnqueuedFrames() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + TRACE_EVENT0("webrtc", "VSyncEncodeAdapterMode::EncodeAllEnqueuedFrames"); + + // Local time in webrtc time base. + Timestamp post_time = clock_->CurrentTime(); + + for (auto& input : input_queue_) { + TRACE_EVENT1("webrtc", "FrameCadenceAdapterImpl::EncodeAllEnqueuedFrames", + "VSyncEncodeDelay", + (post_time - input.time_when_posted_us).ms()); + + const VideoFrame frame = std::move(input.video_frame); + queue_->PostTask(SafeTask(queue_safety_flag_, [this, post_time, frame] { + RTC_DCHECK_RUN_ON(queue_); + + // TODO(b/304158952): Support more refined queue overload control. + callback_->OnFrame(post_time, /*queue_overload=*/false, frame); + })); + } + + input_queue_.clear(); +} + FrameCadenceAdapterImpl::FrameCadenceAdapterImpl( Clock* clock, TaskQueueBase* queue, + Metronome* metronome, + TaskQueueBase* worker_queue, const FieldTrialsView& field_trials) : clock_(clock), queue_(queue), zero_hertz_screenshare_enabled_( - !field_trials.IsDisabled("WebRTC-ZeroHertzScreenshare")) {} + !field_trials.IsDisabled("WebRTC-ZeroHertzScreenshare")), + frame_cadence_adapter_zero_hertz_queue_overload_enabled_( + !field_trials.IsDisabled("WebRTC-ZeroHertzQueueOverload")), + metronome_(metronome), + worker_queue_(worker_queue) {} FrameCadenceAdapterImpl::~FrameCadenceAdapterImpl() { RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this; + + // VSync adapter needs to be destroyed on worker queue when metronome is + // valid. 
+ if (metronome_) { + absl::Cleanup cleanup = [adapter = std::move(vsync_encode_adapter_)] {}; + worker_queue_->PostTask([cleanup = std::move(cleanup)] {}); + } } void FrameCadenceAdapterImpl::Initialize(Callback* callback) { callback_ = callback; - passthrough_adapter_.emplace(clock_, callback); - current_adapter_mode_ = &passthrough_adapter_.value(); + // Use VSync encode mode if metronome is valid, otherwise passthrough mode + // would be used. + if (metronome_) { + vsync_encode_adapter_ = std::make_unique<VSyncEncodeAdapterMode>( + clock_, queue_, safety_.flag(), metronome_, worker_queue_, callback_); + } else { + passthrough_adapter_.emplace(clock_, callback); + } + ConfigureCurrentAdapterWithoutZeroHertz(); } void FrameCadenceAdapterImpl::SetZeroHertzModeEnabled( @@ -665,9 +880,16 @@ absl::optional<uint32_t> FrameCadenceAdapterImpl::GetInputFrameRateFps() { void FrameCadenceAdapterImpl::UpdateFrameRate() { RTC_DCHECK_RUN_ON(queue_); // The frame rate need not be updated for the zero-hertz adapter. The - // passthrough adapter however uses it. Always pass frames into the - // passthrough to keep the estimation alive should there be an adapter switch. - passthrough_adapter_->UpdateFrameRate(); + // vsync encode and passthrough adapter however uses it. Always pass frames + // into the vsync encode or passthrough to keep the estimation alive should + // there be an adapter switch. + if (metronome_) { + RTC_CHECK(vsync_encode_adapter_); + vsync_encode_adapter_->UpdateFrameRate(); + } else { + RTC_CHECK(passthrough_adapter_); + passthrough_adapter_->UpdateFrameRate(); + } } void FrameCadenceAdapterImpl::UpdateLayerQualityConvergence( @@ -710,21 +932,8 @@ void FrameCadenceAdapterImpl::OnFrame(const VideoFrame& frame) { // Local time in webrtc time base. 
Timestamp post_time = clock_->CurrentTime(); frames_scheduled_for_processing_.fetch_add(1, std::memory_order_relaxed); - if (zero_hertz_adapter_is_active_.load(std::memory_order_relaxed)) { - TRACE_EVENT_ASYNC_BEGIN0(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "OnFrameToEncode", - frame.video_frame_buffer().get()); - TRACE_EVENT_ASYNC_BEGIN0(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "OnFrameToQueue", - frame.video_frame_buffer().get()); - } queue_->PostTask(SafeTask(safety_.flag(), [this, post_time, frame] { RTC_DCHECK_RUN_ON(queue_); - if (zero_hertz_adapter_is_active_.load(std::memory_order_relaxed)) { - TRACE_EVENT_ASYNC_END0(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "OnFrameToQueue", - frame.video_frame_buffer().get()); - } if (zero_hertz_adapter_created_timestamp_.has_value()) { TimeDelta time_until_first_frame = clock_->CurrentTime() - *zero_hertz_adapter_created_timestamp_; @@ -780,6 +989,17 @@ bool FrameCadenceAdapterImpl::IsZeroHertzScreenshareEnabled() const { zero_hertz_params_.has_value(); } +void FrameCadenceAdapterImpl::ConfigureCurrentAdapterWithoutZeroHertz() { + // Enable VSyncEncodeAdapterMode if metronome is valid. 
+ if (metronome_) { + RTC_CHECK(vsync_encode_adapter_); + current_adapter_mode_ = vsync_encode_adapter_.get(); + } else { + RTC_CHECK(passthrough_adapter_); + current_adapter_mode_ = &passthrough_adapter_.value(); + } +} + void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( bool was_zero_hertz_enabled) { RTC_DCHECK_RUN_ON(queue_); @@ -790,8 +1010,10 @@ void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( if (!was_zero_hertz_enabled || max_fps_has_changed) { RTC_LOG(LS_INFO) << "Zero hertz mode enabled (max_fps=" << source_constraints_->max_fps.value() << ")"; - zero_hertz_adapter_.emplace(queue_, clock_, callback_, - source_constraints_->max_fps.value()); + zero_hertz_adapter_.emplace( + queue_, clock_, callback_, source_constraints_->max_fps.value(), + frames_scheduled_for_processing_, + frame_cadence_adapter_zero_hertz_queue_overload_enabled_); zero_hertz_adapter_->UpdateVideoSourceRestrictions( restricted_max_frame_rate_); zero_hertz_adapter_created_timestamp_ = clock_->CurrentTime(); @@ -801,10 +1023,9 @@ void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( } else { if (was_zero_hertz_enabled) { zero_hertz_adapter_ = absl::nullopt; - zero_hertz_adapter_is_active_.store(false, std::memory_order_relaxed); RTC_LOG(LS_INFO) << "Zero hertz mode disabled."; } - current_adapter_mode_ = &passthrough_adapter_.value(); + ConfigureCurrentAdapterWithoutZeroHertz(); } } @@ -813,8 +1034,11 @@ void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( std::unique_ptr<FrameCadenceAdapterInterface> FrameCadenceAdapterInterface::Create(Clock* clock, TaskQueueBase* queue, + Metronome* metronome, + TaskQueueBase* worker_queue, const FieldTrialsView& field_trials) { - return std::make_unique<FrameCadenceAdapterImpl>(clock, queue, field_trials); + return std::make_unique<FrameCadenceAdapterImpl>(clock, queue, metronome, + worker_queue, field_trials); } } // namespace webrtc diff --git a/third_party/libwebrtc/video/frame_cadence_adapter.h 
b/third_party/libwebrtc/video/frame_cadence_adapter.h index 2b62bb26cd..ec8e667b04 100644 --- a/third_party/libwebrtc/video/frame_cadence_adapter.h +++ b/third_party/libwebrtc/video/frame_cadence_adapter.h @@ -15,6 +15,7 @@ #include "absl/base/attributes.h" #include "api/field_trials_view.h" +#include "api/metronome/metronome.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "api/video/video_frame.h" @@ -81,6 +82,8 @@ class FrameCadenceAdapterInterface static std::unique_ptr<FrameCadenceAdapterInterface> Create( Clock* clock, TaskQueueBase* queue, + Metronome* metronome, + TaskQueueBase* worker_queue, const FieldTrialsView& field_trials); // Call before using the rest of the API. diff --git a/third_party/libwebrtc/video/frame_cadence_adapter_unittest.cc b/third_party/libwebrtc/video/frame_cadence_adapter_unittest.cc index 0fef2400f0..54548de9bb 100644 --- a/third_party/libwebrtc/video/frame_cadence_adapter_unittest.cc +++ b/third_party/libwebrtc/video/frame_cadence_adapter_unittest.cc @@ -14,6 +14,7 @@ #include <vector> #include "absl/functional/any_invocable.h" +#include "api/metronome/test/fake_metronome.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" @@ -38,9 +39,11 @@ namespace { using ::testing::_; using ::testing::ElementsAre; +using ::testing::InSequence; using ::testing::Invoke; using ::testing::InvokeWithoutArgs; using ::testing::Mock; +using ::testing::NiceMock; using ::testing::Pair; using ::testing::Values; @@ -64,8 +67,9 @@ VideoFrame CreateFrameWithTimestamps( std::unique_ptr<FrameCadenceAdapterInterface> CreateAdapter( const FieldTrialsView& field_trials, Clock* clock) { - return FrameCadenceAdapterInterface::Create(clock, TaskQueueBase::Current(), - field_trials); + return FrameCadenceAdapterInterface::Create( + clock, TaskQueueBase::Current(), /*metronome=*/nullptr, + /*worker_queue=*/nullptr, field_trials); } 
class MockCallback : public FrameCadenceAdapterInterface::Callback { @@ -308,6 +312,7 @@ TEST(FrameCadenceAdapterTest, DelayedProcessingUnderHeavyContention) { })); adapter->OnFrame(CreateFrame()); time_controller.SkipForwardBy(time_skipped); + time_controller.AdvanceTime(TimeDelta::Zero()); } TEST(FrameCadenceAdapterTest, RepeatsFramesDelayed) { @@ -593,7 +598,8 @@ TEST(FrameCadenceAdapterTest, IgnoresDropInducedCallbacksPostDestruction) { auto queue = time_controller.GetTaskQueueFactory()->CreateTaskQueue( "queue", TaskQueueFactory::Priority::NORMAL); auto adapter = FrameCadenceAdapterInterface::Create( - time_controller.GetClock(), queue.get(), enabler); + time_controller.GetClock(), queue.get(), /*metronome=*/nullptr, + /*worker_queue=*/nullptr, enabler); queue->PostTask([&adapter, &callback] { adapter->Initialize(callback.get()); adapter->SetZeroHertzModeEnabled( @@ -609,6 +615,82 @@ TEST(FrameCadenceAdapterTest, IgnoresDropInducedCallbacksPostDestruction) { time_controller.AdvanceTime(3 * TimeDelta::Seconds(1) / kMaxFps); } +TEST(FrameCadenceAdapterTest, EncodeFramesAreAlignedWithMetronomeTick) { + ZeroHertzFieldTrialEnabler enabler; + GlobalSimulatedTimeController time_controller(Timestamp::Zero()); + // Here the metronome interval is 33ms, because the metronome is not + // infrequent then the encode tasks are aligned with the tick period. 
+ static constexpr TimeDelta kTickPeriod = TimeDelta::Millis(33); + auto queue = time_controller.GetTaskQueueFactory()->CreateTaskQueue( + "queue", TaskQueueFactory::Priority::NORMAL); + auto worker_queue = time_controller.GetTaskQueueFactory()->CreateTaskQueue( + "work_queue", TaskQueueFactory::Priority::NORMAL); + static test::FakeMetronome metronome(kTickPeriod); + auto adapter = FrameCadenceAdapterInterface::Create( + time_controller.GetClock(), queue.get(), &metronome, worker_queue.get(), + enabler); + MockCallback callback; + adapter->Initialize(&callback); + auto frame = CreateFrame(); + + // `callback->OnFrame()` would not be called if only 32ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(0); + adapter->OnFrame(frame); + time_controller.AdvanceTime(TimeDelta::Millis(32)); + Mock::VerifyAndClearExpectations(&callback); + + // `callback->OnFrame()` should be called if 33ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(1); + time_controller.AdvanceTime(TimeDelta::Millis(1)); + Mock::VerifyAndClearExpectations(&callback); + + // `callback->OnFrame()` would not be called if only 32ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(0); + // Send two frame before next tick. + adapter->OnFrame(frame); + adapter->OnFrame(frame); + time_controller.AdvanceTime(TimeDelta::Millis(32)); + Mock::VerifyAndClearExpectations(&callback); + + // `callback->OnFrame()` should be called if 33ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(2); + time_controller.AdvanceTime(TimeDelta::Millis(1)); + Mock::VerifyAndClearExpectations(&callback); + + // Change the metronome tick period to 67ms (15Hz). + metronome.SetTickPeriod(TimeDelta::Millis(67)); + // Expect the encode would happen immediately. 
+ EXPECT_CALL(callback, OnFrame(_, false, _)).Times(1); + adapter->OnFrame(frame); + time_controller.AdvanceTime(TimeDelta::Zero()); + Mock::VerifyAndClearExpectations(&callback); + + // Change the metronome tick period to 16ms (60Hz). + metronome.SetTickPeriod(TimeDelta::Millis(16)); + // Expect the encode would not happen if only 15ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(0); + adapter->OnFrame(frame); + time_controller.AdvanceTime(TimeDelta::Millis(15)); + Mock::VerifyAndClearExpectations(&callback); + // `callback->OnFrame()` should be called if 16ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(1); + time_controller.AdvanceTime(TimeDelta::Millis(1)); + Mock::VerifyAndClearExpectations(&callback); + + rtc::Event finalized; + queue->PostTask([&] { + adapter = nullptr; + finalized.Set(); + }); + finalized.Wait(rtc::Event::kForever); +} + class FrameCadenceAdapterSimulcastLayersParamTest : public ::testing::TestWithParam<int> { public: @@ -1076,5 +1158,166 @@ TEST(FrameCadenceAdapterRealTimeTest, finalized.Wait(rtc::Event::kForever); } +class ZeroHertzQueueOverloadTest : public ::testing::Test { + public: + static constexpr int kMaxFps = 10; + + ZeroHertzQueueOverloadTest() { + Initialize(); + metrics::Reset(); + } + + void Initialize() { + adapter_->Initialize(&callback_); + adapter_->SetZeroHertzModeEnabled( + FrameCadenceAdapterInterface::ZeroHertzModeParams{ + /*num_simulcast_layers=*/1}); + adapter_->OnConstraintsChanged( + VideoTrackSourceConstraints{/*min_fps=*/0, kMaxFps}); + time_controller_.AdvanceTime(TimeDelta::Zero()); + } + + void ScheduleDelayed(TimeDelta delay, absl::AnyInvocable<void() &&> task) { + TaskQueueBase::Current()->PostDelayedTask(std::move(task), delay); + } + + void PassFrame() { adapter_->OnFrame(CreateFrame()); } + + void AdvanceTime(TimeDelta duration) { + time_controller_.AdvanceTime(duration); + } + + void SkipForwardBy(TimeDelta 
duration) { + time_controller_.SkipForwardBy(duration); + } + + Timestamp CurrentTime() { return time_controller_.GetClock()->CurrentTime(); } + + protected: + test::ScopedKeyValueConfig field_trials_; + NiceMock<MockCallback> callback_; + GlobalSimulatedTimeController time_controller_{Timestamp::Zero()}; + std::unique_ptr<FrameCadenceAdapterInterface> adapter_{ + CreateAdapter(field_trials_, time_controller_.GetClock())}; +}; + +TEST_F(ZeroHertzQueueOverloadTest, + ForwardedFramesDuringTooLongEncodeTimeAreFlaggedWithQueueOverload) { + InSequence s; + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] { + PassFrame(); + PassFrame(); + PassFrame(); + SkipForwardBy(TimeDelta::Millis(301)); + })); + EXPECT_CALL(callback_, OnFrame(_, true, _)).Times(3); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"), + ElementsAre(Pair(false, 1), Pair(true, 3))); +} + +TEST_F(ZeroHertzQueueOverloadTest, + ForwardedFramesAfterOverloadBurstAreNotFlaggedWithQueueOverload) { + InSequence s; + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] { + PassFrame(); + PassFrame(); + PassFrame(); + SkipForwardBy(TimeDelta::Millis(301)); + })); + EXPECT_CALL(callback_, OnFrame(_, true, _)).Times(3); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_CALL(callback_, OnFrame(_, false, _)).Times(2); + PassFrame(); + PassFrame(); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"), + ElementsAre(Pair(false, 3), Pair(true, 3))); +} + +TEST_F(ZeroHertzQueueOverloadTest, + ForwardedFramesDuringNormalEncodeTimeAreNotFlaggedWithQueueOverload) { + InSequence s; + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] { + PassFrame(); + PassFrame(); + PassFrame(); + // Long but not too long encode time. 
+ SkipForwardBy(TimeDelta::Millis(99)); + })); + EXPECT_CALL(callback_, OnFrame(_, false, _)).Times(3); + AdvanceTime(TimeDelta::Millis(199)); + EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"), + ElementsAre(Pair(false, 4))); +} + +TEST_F( + ZeroHertzQueueOverloadTest, + AvoidSettingQueueOverloadAndSendRepeatWhenNoNewPacketsWhileTooLongEncode) { + // Receive one frame only and let OnFrame take such a long time that an + // overload normally is warranted. But the fact that no new frames arrive + // while being blocked should trigger a non-idle repeat to ensure that the + // video stream does not freeze and queue overload should be false. + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)) + .WillOnce( + InvokeWithoutArgs([&] { SkipForwardBy(TimeDelta::Millis(101)); })) + .WillOnce(InvokeWithoutArgs([&] { + // Non-idle repeat. + EXPECT_EQ(CurrentTime(), Timestamp::Zero() + TimeDelta::Millis(201)); + })); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"), + ElementsAre(Pair(false, 2))); +} + +TEST_F(ZeroHertzQueueOverloadTest, + EnterFastRepeatAfterQueueOverloadWhenReceivedOnlyOneFrameDuringEncode) { + InSequence s; + // - Forward one frame frame during high load which triggers queue overload. + // - Receive only one new frame while being blocked and verify that the + // cancelled repeat was for the first frame and not the second. + // - Fast repeat mode should happen after second frame. + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] { + PassFrame(); + SkipForwardBy(TimeDelta::Millis(101)); + })); + EXPECT_CALL(callback_, OnFrame(_, true, _)); + AdvanceTime(TimeDelta::Millis(100)); + + // Fast repeats should take place from here on. 
+ EXPECT_CALL(callback_, OnFrame(_, false, _)).Times(5); + AdvanceTime(TimeDelta::Millis(500)); + EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"), + ElementsAre(Pair(false, 6), Pair(true, 1))); +} + +TEST_F(ZeroHertzQueueOverloadTest, + QueueOverloadIsDisabledForZeroHerzWhenKillSwitchIsEnabled) { + webrtc::test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-ZeroHertzQueueOverload/Disabled/"); + adapter_.reset(); + adapter_ = CreateAdapter(field_trials, time_controller_.GetClock()); + Initialize(); + + // Same as ForwardedFramesDuringTooLongEncodeTimeAreFlaggedWithQueueOverload + // but this time the queue overload mechanism is disabled. + InSequence s; + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] { + PassFrame(); + PassFrame(); + PassFrame(); + SkipForwardBy(TimeDelta::Millis(301)); + })); + EXPECT_CALL(callback_, OnFrame(_, false, _)).Times(3); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_EQ(metrics::NumSamples("WebRTC.Screenshare.ZeroHz.QueueOverload"), 0); +} + } // namespace } // namespace webrtc diff --git a/third_party/libwebrtc/video/full_stack_tests.cc b/third_party/libwebrtc/video/full_stack_tests.cc index 7791afc854..335a9363af 100644 --- a/third_party/libwebrtc/video/full_stack_tests.cc +++ b/third_party/libwebrtc/video/full_stack_tests.cc @@ -135,7 +135,7 @@ TEST(FullStackTest, Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { return; auto fixture = CreateVideoQualityTestFixture(); - SdpVideoFormat::Parameters vp92 = { + CodecParameterMap vp92 = { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}}; ParamsWithLogging generator; generator.call.send_side_bwe = true; diff --git a/third_party/libwebrtc/video/render/BUILD.gn b/third_party/libwebrtc/video/render/BUILD.gn index ff721dc61c..a948a0e2fa 100644 --- a/third_party/libwebrtc/video/render/BUILD.gn +++ b/third_party/libwebrtc/video/render/BUILD.gn @@ -26,7 +26,6 @@ rtc_library("incoming_video_stream") { 
"../../rtc_base:event_tracer", "../../rtc_base:macromagic", "../../rtc_base:race_checker", - "../../rtc_base:rtc_task_queue", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] diff --git a/third_party/libwebrtc/video/render/incoming_video_stream.cc b/third_party/libwebrtc/video/render/incoming_video_stream.cc index e740c47bd0..650036ddc9 100644 --- a/third_party/libwebrtc/video/render/incoming_video_stream.cc +++ b/third_party/libwebrtc/video/render/incoming_video_stream.cc @@ -33,17 +33,23 @@ IncomingVideoStream::IncomingVideoStream( IncomingVideoStream::~IncomingVideoStream() { RTC_DCHECK(main_thread_checker_.IsCurrent()); + // The queue must be destroyed before its pointer is invalidated to avoid race + // between destructor and posting task to the task queue from itself. + // std::unique_ptr destructor does the same two operations in reverse order as + // it doesn't expect member would be used after its destruction has started. + incoming_render_queue_.get_deleter()(incoming_render_queue_.get()); + incoming_render_queue_.release(); } void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) { TRACE_EVENT0("webrtc", "IncomingVideoStream::OnFrame"); RTC_CHECK_RUNS_SERIALIZED(&decoder_race_checker_); - RTC_DCHECK(!incoming_render_queue_.IsCurrent()); + RTC_DCHECK(!incoming_render_queue_->IsCurrent()); // TODO(srte): Using video_frame = std::move(video_frame) would move the frame // into the lambda instead of copying it, but it doesn't work unless we change // OnFrame to take its frame argument by value instead of const reference. 
- incoming_render_queue_.PostTask([this, video_frame = video_frame]() mutable { - RTC_DCHECK_RUN_ON(&incoming_render_queue_); + incoming_render_queue_->PostTask([this, video_frame = video_frame]() mutable { + RTC_DCHECK_RUN_ON(incoming_render_queue_.get()); if (render_buffers_.AddFrame(std::move(video_frame)) == 1) Dequeue(); }); @@ -51,14 +57,14 @@ void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) { void IncomingVideoStream::Dequeue() { TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue"); - RTC_DCHECK_RUN_ON(&incoming_render_queue_); + RTC_DCHECK_RUN_ON(incoming_render_queue_.get()); absl::optional<VideoFrame> frame_to_render = render_buffers_.FrameToRender(); if (frame_to_render) callback_->OnFrame(*frame_to_render); if (render_buffers_.HasPendingFrames()) { uint32_t wait_time = render_buffers_.TimeToNextFrameRelease(); - incoming_render_queue_.PostDelayedHighPrecisionTask( + incoming_render_queue_->PostDelayedHighPrecisionTask( [this]() { Dequeue(); }, TimeDelta::Millis(wait_time)); } } diff --git a/third_party/libwebrtc/video/render/incoming_video_stream.h b/third_party/libwebrtc/video/render/incoming_video_stream.h index 4873ae7dcb..066c0db317 100644 --- a/third_party/libwebrtc/video/render/incoming_video_stream.h +++ b/third_party/libwebrtc/video/render/incoming_video_stream.h @@ -13,12 +13,14 @@ #include <stdint.h> +#include <memory> + #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "rtc_base/race_checker.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" #include "video/render/video_render_frames.h" @@ -38,9 +40,9 @@ class IncomingVideoStream : public rtc::VideoSinkInterface<VideoFrame> { SequenceChecker main_thread_checker_; rtc::RaceChecker decoder_race_checker_; - VideoRenderFrames render_buffers_ RTC_GUARDED_BY(&incoming_render_queue_); + 
VideoRenderFrames render_buffers_ RTC_GUARDED_BY(incoming_render_queue_); rtc::VideoSinkInterface<VideoFrame>* const callback_; - rtc::TaskQueue incoming_render_queue_; + std::unique_ptr<TaskQueueBase, TaskQueueDeleter> incoming_render_queue_; }; } // namespace webrtc diff --git a/third_party/libwebrtc/video/rtp_video_stream_receiver2.cc b/third_party/libwebrtc/video/rtp_video_stream_receiver2.cc index c4a021d6c0..077f522d41 100644 --- a/third_party/libwebrtc/video/rtp_video_stream_receiver2.cc +++ b/third_party/libwebrtc/video/rtp_video_stream_receiver2.cc @@ -358,7 +358,7 @@ RtpVideoStreamReceiver2::~RtpVideoStreamReceiver2() { void RtpVideoStreamReceiver2::AddReceiveCodec( uint8_t payload_type, VideoCodecType video_codec, - const std::map<std::string, std::string>& codec_params, + const webrtc::CodecParameterMap& codec_params, bool raw_payload) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) > 0 || @@ -433,23 +433,21 @@ RtpVideoStreamReceiver2::ParseGenericDependenciesExtension( const RtpPacketReceived& rtp_packet, RTPVideoHeader* video_header) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - if (rtp_packet.HasExtension<RtpDependencyDescriptorExtension>()) { - webrtc::DependencyDescriptor dependency_descriptor; + if (DependencyDescriptorMandatory dd_mandatory; + rtp_packet.GetExtension<RtpDependencyDescriptorExtensionMandatory>( + &dd_mandatory)) { + const int64_t frame_id = + frame_id_unwrapper_.Unwrap(dd_mandatory.frame_number()); + DependencyDescriptor dependency_descriptor; if (!rtp_packet.GetExtension<RtpDependencyDescriptorExtension>( video_structure_.get(), &dependency_descriptor)) { - // Descriptor is there, but failed to parse. Either it is invalid, - // or too old packet (after relevant video_structure_ changed), - // or too new packet (before relevant video_structure_ arrived). - // Drop such packet to be on the safe side. - // TODO(bugs.webrtc.org/10342): Stash too new packet. 
- Timestamp now = clock_->CurrentTime(); - if (now - last_logged_failed_to_parse_dd_ > TimeDelta::Seconds(1)) { - last_logged_failed_to_parse_dd_ = now; - RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() - << " Failed to parse dependency descriptor."; + if (!video_structure_frame_id_ || frame_id < video_structure_frame_id_) { + return kDropPacket; + } else { + return kStashPacket; } - return kDropPacket; } + if (dependency_descriptor.attached_structure != nullptr && !dependency_descriptor.first_packet_in_frame) { RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() @@ -462,8 +460,6 @@ RtpVideoStreamReceiver2::ParseGenericDependenciesExtension( video_header->is_last_packet_in_frame = dependency_descriptor.last_packet_in_frame; - int64_t frame_id = - frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number); auto& generic_descriptor_info = video_header->generic.emplace(); generic_descriptor_info.frame_id = frame_id; generic_descriptor_info.spatial_index = @@ -538,10 +534,11 @@ RtpVideoStreamReceiver2::ParseGenericDependenciesExtension( return kHasGenericDescriptor; } -void RtpVideoStreamReceiver2::OnReceivedPayloadData( +bool RtpVideoStreamReceiver2::OnReceivedPayloadData( rtc::CopyOnWriteBuffer codec_payload, const RtpPacketReceived& rtp_packet, - const RTPVideoHeader& video) { + const RTPVideoHeader& video, + int times_nacked) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); auto packet = @@ -594,16 +591,23 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( video_header.playout_delay = rtp_packet.GetExtension<PlayoutDelayLimits>(); } - ParseGenericDependenciesResult generic_descriptor_state = - ParseGenericDependenciesExtension(rtp_packet, &video_header); - if (!rtp_packet.recovered()) { UpdatePacketReceiveTimestamps( rtp_packet, video_header.frame_type == VideoFrameType::kVideoFrameKey); } - if (generic_descriptor_state == kDropPacket) { + ParseGenericDependenciesResult generic_descriptor_state = + ParseGenericDependenciesExtension(rtp_packet, 
&video_header); + + if (generic_descriptor_state == kStashPacket) { + return true; + } else if (generic_descriptor_state == kDropPacket) { Timestamp now = clock_->CurrentTime(); + if (now - last_logged_failed_to_parse_dd_ > TimeDelta::Seconds(1)) { + last_logged_failed_to_parse_dd_ = now; + RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() + << " Failed to parse dependency descriptor."; + } if (video_structure_ == nullptr && next_keyframe_request_for_missing_video_structure_ < now) { // No video structure received yet, most likely part of the initial @@ -612,7 +616,7 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( next_keyframe_request_for_missing_video_structure_ = now + TimeDelta::Seconds(1); } - return; + return false; } // Color space should only be transmitted in the last packet of a frame, @@ -658,21 +662,12 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( } } - if (nack_module_) { - const bool is_keyframe = - video_header.is_first_packet_in_frame && - video_header.frame_type == VideoFrameType::kVideoFrameKey; - - packet->times_nacked = nack_module_->OnReceivedPacket( - rtp_packet.SequenceNumber(), is_keyframe, rtp_packet.recovered()); - } else { - packet->times_nacked = -1; - } + packet->times_nacked = times_nacked; if (codec_payload.size() == 0) { NotifyReceiverOfEmptyPacket(packet->seq_num); rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - return; + return false; } if (packet->codec() == kVideoCodecH264) { @@ -695,7 +690,7 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); [[fallthrough]]; case video_coding::H264SpsPpsTracker::kDrop: - return; + return false; case video_coding::H264SpsPpsTracker::kInsert: packet->video_payload = std::move(fixed.bitstream); break; @@ -708,6 +703,7 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); frame_counter_.Add(packet->timestamp); 
OnInsertedPacket(packet_buffer_.InsertPacket(std::move(packet))); + return false; } void RtpVideoStreamReceiver2::OnRecoveredPacket( @@ -1111,15 +1107,51 @@ void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) { if (type_it == payload_type_map_.end()) { return; } - absl::optional<VideoRtpDepacketizer::ParsedRtpPayload> parsed_payload = - type_it->second->Parse(packet.PayloadBuffer()); - if (parsed_payload == absl::nullopt) { - RTC_LOG(LS_WARNING) << "Failed parsing payload."; - return; - } - OnReceivedPayloadData(std::move(parsed_payload->video_payload), packet, - parsed_payload->video_header); + auto parse_and_insert = [&](const RtpPacketReceived& packet) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + absl::optional<VideoRtpDepacketizer::ParsedRtpPayload> parsed_payload = + type_it->second->Parse(packet.PayloadBuffer()); + if (parsed_payload == absl::nullopt) { + RTC_LOG(LS_WARNING) << "Failed parsing payload."; + return false; + } + + int times_nacked = nack_module_ + ? nack_module_->OnReceivedPacket( + packet.SequenceNumber(), packet.recovered()) + : -1; + + return OnReceivedPayloadData(std::move(parsed_payload->video_payload), + packet, parsed_payload->video_header, + times_nacked); + }; + + // When the dependency descriptor is used and the descriptor fail to parse + // then `OnReceivedPayloadData` may return true to signal the the packet + // should be retried at a later stage, which is why they are stashed here. + // + // TODO(bugs.webrtc.org/15782): + // This is an ugly solution. The way things should work is for the + // `RtpFrameReferenceFinder` to stash assembled frames until the keyframe with + // the relevant template structure has been received, but unfortunately the + // `frame_transformer_delegate_` is called before the frames are inserted into + // the `RtpFrameReferenceFinder`, and it expects the dependency descriptor to + // be parsed at that stage. 
+ if (parse_and_insert(packet)) { + if (stashed_packets_.size() == 100) { + stashed_packets_.clear(); + } + stashed_packets_.push_back(packet); + } else { + for (auto it = stashed_packets_.begin(); it != stashed_packets_.end();) { + if (parse_and_insert(*it)) { + ++it; // keep in the stash. + } else { + it = stashed_packets_.erase(it); + } + } + } } void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader( @@ -1151,8 +1183,7 @@ void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) { OnInsertedPacket(packet_buffer_.InsertPadding(seq_num)); if (nack_module_) { - nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false, - /* is _recovered = */ false); + nack_module_->OnReceivedPacket(seq_num, /*is_recovered=*/false); } if (loss_notification_controller_) { // TODO(bugs.webrtc.org/10336): Handle empty packets. diff --git a/third_party/libwebrtc/video/rtp_video_stream_receiver2.h b/third_party/libwebrtc/video/rtp_video_stream_receiver2.h index 10329005ba..b942cb97a6 100644 --- a/third_party/libwebrtc/video/rtp_video_stream_receiver2.h +++ b/third_party/libwebrtc/video/rtp_video_stream_receiver2.h @@ -104,7 +104,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, void AddReceiveCodec(uint8_t payload_type, VideoCodecType video_codec, - const std::map<std::string, std::string>& codec_params, + const webrtc::CodecParameterMap& codec_params, bool raw_payload); void RemoveReceiveCodec(uint8_t payload_type); @@ -135,9 +135,11 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, void OnRtpPacket(const RtpPacketReceived& packet) override; // Public only for tests. - void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload, + // Returns true if the packet should be stashed and retried at a later stage. 
+ bool OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload, const RtpPacketReceived& rtp_packet, - const RTPVideoHeader& video); + const RTPVideoHeader& video, + int times_nacked); // Implements RecoveredPacketReceiver. void OnRecoveredPacket(const RtpPacketReceived& packet) override; @@ -288,6 +290,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, RTC_GUARDED_BY(packet_sequence_checker_); }; enum ParseGenericDependenciesResult { + kStashPacket, kDropPacket, kHasGenericDescriptor, kNoGenericDescriptor @@ -403,7 +406,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // TODO(johan): Remove pt_codec_params_ once // https://bugs.chromium.org/p/webrtc/issues/detail?id=6883 is resolved. // Maps a payload type to a map of out-of-band supplied codec parameters. - std::map<uint8_t, std::map<std::string, std::string>> pt_codec_params_ + std::map<uint8_t, webrtc::CodecParameterMap> pt_codec_params_ RTC_GUARDED_BY(packet_sequence_checker_); int16_t last_payload_type_ RTC_GUARDED_BY(packet_sequence_checker_) = -1; @@ -440,6 +443,8 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, RTC_GUARDED_BY(packet_sequence_checker_); std::map<int64_t, RtpPacketInfo> packet_infos_ RTC_GUARDED_BY(packet_sequence_checker_); + std::vector<RtpPacketReceived> stashed_packets_ + RTC_GUARDED_BY(packet_sequence_checker_); Timestamp next_keyframe_request_for_missing_video_structure_ = Timestamp::MinusInfinity(); diff --git a/third_party/libwebrtc/video/rtp_video_stream_receiver2_unittest.cc b/third_party/libwebrtc/video/rtp_video_stream_receiver2_unittest.cc index d82f7bb9a5..f039bf29b1 100644 --- a/third_party/libwebrtc/video/rtp_video_stream_receiver2_unittest.cc +++ b/third_party/libwebrtc/video/rtp_video_stream_receiver2_unittest.cc @@ -118,7 +118,7 @@ class MockOnCompleteFrameCallback void AppendExpectedBitstream(const uint8_t data[], size_t size_in_bytes) { // TODO(Johan): Let rtc::ByteBuffer handle uint8_t* instead of char*. 
- buffer_.WriteBytes(reinterpret_cast<const char*>(data), size_in_bytes); + buffer_.WriteBytes(data, size_in_bytes); } rtc::ByteBufferWriter buffer_; }; @@ -307,7 +307,7 @@ TEST_F(RtpVideoStreamReceiver2Test, CacheColorSpaceFromLastPacketOfKeyframe) { received_packet_generator.SetColorSpace(kColorSpace); // Prepare the receiver for VP9. - std::map<std::string, std::string> codec_params; + webrtc::CodecParameterMap codec_params; rtp_video_stream_receiver_->AddReceiveCodec(kVp9PayloadType, kVideoCodecVP9, codec_params, /*raw_payload=*/false); @@ -368,7 +368,7 @@ TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrame) { data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } TEST_F(RtpVideoStreamReceiver2Test, SetProtectionPayloadTypes) { @@ -407,7 +407,7 @@ TEST_F(RtpVideoStreamReceiver2Test, PacketInfoIsPropagatedIntoVideoFrames) { ElementsAre(kAbsoluteCaptureTimestamp)); })); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } TEST_F(RtpVideoStreamReceiver2Test, @@ -436,7 +436,7 @@ TEST_F(RtpVideoStreamReceiver2Test, data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); // Rtp packet without absolute capture time. 
rtp_packet = RtpPacketReceived(&extension_map); @@ -453,7 +453,7 @@ TEST_F(RtpVideoStreamReceiver2Test, EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), SizeIs(1)); })); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } TEST_F(RtpVideoStreamReceiver2Test, @@ -508,7 +508,7 @@ TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrameBitstreamError) { EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrameFailBitstream(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } class RtpVideoStreamReceiver2TestH264 @@ -536,7 +536,7 @@ TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) { mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(), sps_data.size()); rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet, - sps_video_header); + sps_video_header, 0); rtc::CopyOnWriteBuffer pps_data; RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader(); @@ -549,7 +549,7 @@ TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) { mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(), pps_data.size()); rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet, - pps_video_header); + pps_video_header, 0); rtc::CopyOnWriteBuffer idr_data; RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader(); @@ -566,12 +566,12 @@ TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) { idr_data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, - idr_video_header); + idr_video_header, 0); } TEST_P(RtpVideoStreamReceiver2TestH264, OutOfBandFmtpSpsPps) { constexpr int kPayloadType = 99; - std::map<std::string, std::string> codec_params; + webrtc::CodecParameterMap codec_params; // Example parameter sets from https://tools.ietf.org/html/rfc3984#section-8.2 // . 
codec_params.insert( @@ -607,12 +607,12 @@ TEST_P(RtpVideoStreamReceiver2TestH264, OutOfBandFmtpSpsPps) { data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { constexpr int kPayloadType = 99; - std::map<std::string, std::string> codec_params; + webrtc::CodecParameterMap codec_params; if (GetParam() == "") { // Forcing can be done either with field trial or codec_params. codec_params.insert({cricket::kH264FmtpSpsPpsIdrInKeyframe, ""}); @@ -633,7 +633,7 @@ TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(), sps_data.size()); rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet, - sps_video_header); + sps_video_header, 0); rtc::CopyOnWriteBuffer pps_data; RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader(); @@ -646,7 +646,7 @@ TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(), pps_data.size()); rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet, - pps_video_header); + pps_video_header, 0); rtc::CopyOnWriteBuffer idr_data; RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader(); @@ -665,7 +665,7 @@ TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { .WillOnce( [&](EncodedFrame* frame) { EXPECT_TRUE(frame->is_keyframe()); }); rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, - idr_video_header); + idr_video_header, 0); mock_on_complete_frame_callback_.ClearExpectedBitstream(); mock_on_complete_frame_callback_.AppendExpectedBitstream( kH264StartCode, sizeof(kH264StartCode)); @@ -676,7 +676,7 @@ TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { .WillOnce( [&](EncodedFrame* frame) { 
EXPECT_FALSE(frame->is_keyframe()); }); rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, - idr_video_header); + idr_video_header, 0); } TEST_F(RtpVideoStreamReceiver2Test, PaddingInMediaStream) { @@ -694,26 +694,26 @@ TEST_F(RtpVideoStreamReceiver2Test, PaddingInMediaStream) { EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); rtp_packet.SetSequenceNumber(3); rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet, - video_header); + video_header, 0); rtp_packet.SetSequenceNumber(4); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); video_header.frame_type = VideoFrameType::kVideoFrameDelta; rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); rtp_packet.SetSequenceNumber(6); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_packet.SetSequenceNumber(5); rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet, - video_header); + video_header, 0); } TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeIfFirstFrameIsDelta) { @@ -725,7 +725,7 @@ TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeIfFirstFrameIsDelta) { GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); EXPECT_THAT(rtcp_packet_parser_.pli()->num_packets(), Eq(1)); } @@ -744,12 +744,12 @@ TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeWhenPacketBufferGetsFull) { while (rtp_packet.SequenceNumber() - start_sequence_number < kPacketBufferMaxSize) { rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); rtp_packet.SetSequenceNumber(rtp_packet.SequenceNumber() + 2); } 
rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); EXPECT_THAT(rtcp_packet_parser_.pli()->num_packets(), Eq(1)); } @@ -1144,6 +1144,103 @@ TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, EXPECT_THAT(rtcp_packet_parser_.pli()->num_packets(), Eq(2)); } +TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, + RetryStashedPacketsAfterReceivingScalabilityStructure) { + FrameDependencyStructure stream_structure1 = CreateStreamStructure(); + FrameDependencyStructure stream_structure2 = CreateStreamStructure(); + // Make sure template ids for these two structures do not collide: + // adjust structure_id (that is also used as template id offset). + stream_structure1.structure_id = 13; + stream_structure2.structure_id = + stream_structure1.structure_id + stream_structure1.templates.size(); + + DependencyDescriptor keyframe1_descriptor; + keyframe1_descriptor.attached_structure = + std::make_unique<FrameDependencyStructure>(stream_structure1); + keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0]; + keyframe1_descriptor.frame_number = 1; + + DependencyDescriptor keyframe2_descriptor; + keyframe2_descriptor.attached_structure = + std::make_unique<FrameDependencyStructure>(stream_structure2); + keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0]; + keyframe2_descriptor.frame_number = 2; + + DependencyDescriptor deltaframe_descriptor; + deltaframe_descriptor.frame_dependencies = stream_structure2.templates[1]; + deltaframe_descriptor.frame_number = 3; + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 1); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 2); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 3); }); + + InjectPacketWith(stream_structure1, keyframe1_descriptor); + InjectPacketWith(stream_structure2, 
deltaframe_descriptor); + InjectPacketWith(stream_structure2, keyframe2_descriptor); +} + +TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, + RetryStashedPacketsAfterReceivingEarlierScalabilityStructure) { + FrameDependencyStructure stream_structure1 = CreateStreamStructure(); + FrameDependencyStructure stream_structure2 = CreateStreamStructure(); + FrameDependencyStructure stream_structure3 = CreateStreamStructure(); + // Make sure template ids for these two structures do not collide: + // adjust structure_id (that is also used as template id offset). + stream_structure1.structure_id = 13; + stream_structure2.structure_id = + stream_structure1.structure_id + stream_structure1.templates.size(); + stream_structure3.structure_id = + stream_structure2.structure_id + stream_structure2.templates.size(); + + DependencyDescriptor keyframe1_descriptor; + keyframe1_descriptor.attached_structure = + std::make_unique<FrameDependencyStructure>(stream_structure1); + keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0]; + keyframe1_descriptor.frame_number = 1; + + DependencyDescriptor keyframe2_descriptor; + keyframe2_descriptor.attached_structure = + std::make_unique<FrameDependencyStructure>(stream_structure2); + keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0]; + keyframe2_descriptor.frame_number = 2; + + DependencyDescriptor deltaframe2_descriptor; + deltaframe2_descriptor.frame_dependencies = stream_structure2.templates[1]; + deltaframe2_descriptor.frame_number = 3; + + DependencyDescriptor keyframe3_descriptor; + keyframe3_descriptor.attached_structure = + std::make_unique<FrameDependencyStructure>(stream_structure3); + keyframe3_descriptor.frame_dependencies = stream_structure3.templates[0]; + keyframe3_descriptor.frame_number = 4; + + DependencyDescriptor deltaframe3_descriptor; + deltaframe3_descriptor.frame_dependencies = stream_structure3.templates[1]; + deltaframe3_descriptor.frame_number = 5; + + 
EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 1); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 2); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 3); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 4); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 5); }); + + InjectPacketWith(stream_structure1, keyframe1_descriptor); + InjectPacketWith(stream_structure2, deltaframe2_descriptor); + InjectPacketWith(stream_structure3, deltaframe3_descriptor); + InjectPacketWith(stream_structure2, keyframe2_descriptor); + InjectPacketWith(stream_structure3, keyframe3_descriptor); +} + TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) { rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer = rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>(); @@ -1166,7 +1263,7 @@ TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) { mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), data.size()); EXPECT_CALL(*mock_frame_transformer, Transform(_)); - receiver->OnReceivedPayloadData(data, rtp_packet, video_header); + receiver->OnReceivedPayloadData(data, rtp_packet, video_header, 0); EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc)); @@ -1233,7 +1330,7 @@ TEST_P(RtpVideoStreamReceiver2TestPlayoutDelay, PlayoutDelay) { EXPECT_EQ(frame->EncodedImage().PlayoutDelay(), expected_playout_delay); })); rtp_video_stream_receiver_->OnReceivedPayloadData( - received_packet.PayloadBuffer(), received_packet, video_header); + received_packet.PayloadBuffer(), received_packet, video_header, 0); } } // namespace webrtc diff --git a/third_party/libwebrtc/video/video_gn/moz.build b/third_party/libwebrtc/video/video_gn/moz.build index 5e3d75d621..1106f274c2 100644 --- a/third_party/libwebrtc/video/video_gn/moz.build 
+++ b/third_party/libwebrtc/video/video_gn/moz.build @@ -49,7 +49,6 @@ UNIFIED_SOURCES += [ "/third_party/libwebrtc/video/transport_adapter.cc", "/third_party/libwebrtc/video/video_quality_observer2.cc", "/third_party/libwebrtc/video/video_receive_stream2.cc", - "/third_party/libwebrtc/video/video_send_stream.cc", "/third_party/libwebrtc/video/video_send_stream_impl.cc", "/third_party/libwebrtc/video/video_stream_decoder2.cc" ] diff --git a/third_party/libwebrtc/video/video_receive_stream2.cc b/third_party/libwebrtc/video/video_receive_stream2.cc index 33e2f39ced..8675ab9979 100644 --- a/third_party/libwebrtc/video/video_receive_stream2.cc +++ b/third_party/libwebrtc/video/video_receive_stream2.cc @@ -177,31 +177,30 @@ TimeDelta DetermineMaxWaitForFrame(TimeDelta rtp_history, bool is_keyframe) { } VideoReceiveStream2::VideoReceiveStream2( - TaskQueueFactory* task_queue_factory, + const Environment& env, Call* call, int num_cpu_cores, PacketRouter* packet_router, VideoReceiveStreamInterface::Config config, CallStats* call_stats, - Clock* clock, std::unique_ptr<VCMTiming> timing, NackPeriodicProcessor* nack_periodic_processor, - DecodeSynchronizer* decode_sync, - RtcEventLog* event_log) - : task_queue_factory_(task_queue_factory), + DecodeSynchronizer* decode_sync) + : env_(env), + packet_sequence_checker_(SequenceChecker::kDetached), + decode_sequence_checker_(SequenceChecker::kDetached), transport_adapter_(config.rtcp_send_transport), config_(std::move(config)), num_cpu_cores_(num_cpu_cores), call_(call), - clock_(clock), call_stats_(call_stats), - source_tracker_(clock_), - stats_proxy_(remote_ssrc(), clock_, call->worker_thread()), - rtp_receive_statistics_(ReceiveStatistics::Create(clock_)), + source_tracker_(&env_.clock()), + stats_proxy_(remote_ssrc(), &env_.clock(), call->worker_thread()), + rtp_receive_statistics_(ReceiveStatistics::Create(&env_.clock())), timing_(std::move(timing)), - video_receiver_(clock_, timing_.get(), call->trials()), + 
video_receiver_(&env_.clock(), timing_.get(), env_.field_trials()), rtp_video_stream_receiver_(call->worker_thread(), - clock_, + &env_.clock(), &transport_adapter_, call_stats->AsRtcpRttStats(), packet_router, @@ -214,8 +213,8 @@ VideoReceiveStream2::VideoReceiveStream2( this, // OnCompleteFrameCallback std::move(config_.frame_decryptor), std::move(config_.frame_transformer), - call->trials(), - event_log), + env_.field_trials(), + &env_.event_log()), rtp_stream_sync_(call->worker_thread(), this), max_wait_for_keyframe_(DetermineMaxWaitForFrame( TimeDelta::Millis(config_.rtp.nack.rtp_history_ms), @@ -223,7 +222,7 @@ VideoReceiveStream2::VideoReceiveStream2( max_wait_for_frame_(DetermineMaxWaitForFrame( TimeDelta::Millis(config_.rtp.nack.rtp_history_ms), false)), - decode_queue_(task_queue_factory_->CreateTaskQueue( + decode_queue_(env_.task_queue_factory().CreateTaskQueue( "DecodingQueue", TaskQueueFactory::Priority::HIGH)) { RTC_LOG(LS_INFO) << "VideoReceiveStream2: " << config_.ToString(); @@ -231,7 +230,6 @@ VideoReceiveStream2::VideoReceiveStream2( RTC_DCHECK(call_->worker_thread()); RTC_DCHECK(config_.renderer); RTC_DCHECK(call_stats_); - packet_sequence_checker_.Detach(); RTC_DCHECK(!config_.decoders.empty()); RTC_CHECK(config_.decoder_factory); @@ -249,11 +247,11 @@ VideoReceiveStream2::VideoReceiveStream2( std::unique_ptr<FrameDecodeScheduler> scheduler = decode_sync ? 
decode_sync->CreateSynchronizedFrameScheduler() : std::make_unique<TaskQueueFrameDecodeScheduler>( - clock, call_->worker_thread()); + &env_.clock(), call_->worker_thread()); buffer_ = std::make_unique<VideoStreamBufferController>( - clock_, call_->worker_thread(), timing_.get(), &stats_proxy_, this, + &env_.clock(), call_->worker_thread(), timing_.get(), &stats_proxy_, this, max_wait_for_keyframe_, max_wait_for_frame_, std::move(scheduler), - call_->trials()); + env_.field_trials()); if (!config_.rtp.rtx_associated_payload_types.empty()) { rtx_receive_stream_ = std::make_unique<RtxReceiveStream>( @@ -346,7 +344,7 @@ void VideoReceiveStream2::Start() { rtc::VideoSinkInterface<VideoFrame>* renderer = nullptr; if (config_.enable_prerenderer_smoothing) { incoming_video_stream_.reset(new IncomingVideoStream( - task_queue_factory_, config_.render_delay_ms, this)); + &env_.task_queue_factory(), config_.render_delay_ms, this)); renderer = incoming_video_stream_.get(); } else { renderer = this; @@ -357,7 +355,7 @@ void VideoReceiveStream2::Start() { settings.set_codec_type( PayloadStringToCodecType(decoder.video_format.name)); settings.set_max_render_resolution( - InitialDecoderResolution(call_->trials())); + InitialDecoderResolution(env_.field_trials())); settings.set_number_of_cores(num_cpu_cores_); const bool raw_payload = @@ -382,8 +380,8 @@ void VideoReceiveStream2::Start() { // Start decoding on task queue. 
stats_proxy_.DecoderThreadStarting(); - decode_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&decode_queue_); + decode_queue_->PostTask([this] { + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); decoder_stopped_ = false; }); buffer_->StartNextDecode(true); @@ -413,8 +411,8 @@ void VideoReceiveStream2::Stop() { if (decoder_running_) { rtc::Event done; - decode_queue_.PostTask([this, &done] { - RTC_DCHECK_RUN_ON(&decode_queue_); + decode_queue_->PostTask([this, &done] { + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); // Set `decoder_stopped_` before deregistering all decoders. This means // that any pending encoded frame will return early without trying to // access the decoder database. @@ -532,7 +530,7 @@ void VideoReceiveStream2::CreateAndRegisterExternalDecoder( } std::string decoded_output_file = - call_->trials().Lookup("WebRTC-DecoderDataDumpDirectory"); + env_.field_trials().Lookup("WebRTC-DecoderDataDumpDirectory"); // Because '/' can't be used inside a field trial parameter, we use ';' // instead. // This is only relevant to WebRTC-DecoderDataDumpDirectory @@ -640,7 +638,7 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { // renderer. Frame may or may be not rendered by this time. This results in // inaccuracy but is still the best we can do in the absence of "frame // rendered" callback from the renderer. 
- VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime()); + VideoFrameMetaData frame_meta(video_frame, env_.clock().CurrentTime()); call_->worker_thread()->PostTask( SafeTask(task_safety_.flag(), [frame_meta, this]() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); @@ -759,7 +757,7 @@ bool VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) { void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr<EncodedFrame> frame) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); const bool keyframe_request_is_due = !last_keyframe_request_ || now >= (*last_keyframe_request_ + max_wait_for_keyframe_); @@ -776,10 +774,10 @@ void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr<EncodedFrame> frame) { } stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp); - decode_queue_.PostTask([this, now, keyframe_request_is_due, - received_frame_is_keyframe, frame = std::move(frame), - keyframe_required = keyframe_required_]() mutable { - RTC_DCHECK_RUN_ON(&decode_queue_); + decode_queue_->PostTask([this, now, keyframe_request_is_due, + received_frame_is_keyframe, frame = std::move(frame), + keyframe_required = keyframe_required_]() mutable { + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); if (decoder_stopped_) return; DecodeFrameResult result = HandleEncodedFrameOnDecodeQueue( @@ -808,7 +806,7 @@ void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr<EncodedFrame> frame) { void VideoReceiveStream2::OnDecodableFrameTimeout(TimeDelta wait) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); absl::optional<int64_t> last_packet_ms = rtp_video_stream_receiver_.LastReceivedPacketMs(); @@ -843,7 +841,7 @@ VideoReceiveStream2::HandleEncodedFrameOnDecodeQueue( std::unique_ptr<EncodedFrame> frame, bool keyframe_request_is_due, bool keyframe_required) { - RTC_DCHECK_RUN_ON(&decode_queue_); + 
RTC_DCHECK_RUN_ON(&decode_sequence_checker_); bool force_request_key_frame = false; absl::optional<int64_t> decoded_frame_picture_id; @@ -885,7 +883,7 @@ VideoReceiveStream2::HandleEncodedFrameOnDecodeQueue( int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame( std::unique_ptr<EncodedFrame> frame) { - RTC_DCHECK_RUN_ON(&decode_queue_); + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); // If `buffered_encoded_frames_` grows out of control (=60 queued frames), // maybe due to a stuck decoder, we just halt the process here and log the @@ -1064,14 +1062,14 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state, last_keyframe_request = last_keyframe_request_; last_keyframe_request_ = generate_key_frame - ? clock_->CurrentTime() + ? env_.clock().CurrentTime() : Timestamp::Millis(state.last_keyframe_request_ms.value_or(0)); } - decode_queue_.PostTask( + decode_queue_->PostTask( [this, &event, &old_state, callback = std::move(state.callback), last_keyframe_request = std::move(last_keyframe_request)] { - RTC_DCHECK_RUN_ON(&decode_queue_); + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); old_state.callback = std::move(encoded_frame_buffer_function_); encoded_frame_buffer_function_ = std::move(callback); @@ -1096,7 +1094,7 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state, void VideoReceiveStream2::GenerateKeyFrame() { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - RequestKeyFrame(clock_->CurrentTime()); + RequestKeyFrame(env_.clock().CurrentTime()); keyframe_generation_requested_ = true; } diff --git a/third_party/libwebrtc/video/video_receive_stream2.h b/third_party/libwebrtc/video/video_receive_stream2.h index 31b9a7eb7c..cfdea630b0 100644 --- a/third_party/libwebrtc/video/video_receive_stream2.h +++ b/third_party/libwebrtc/video/video_receive_stream2.h @@ -17,9 +17,10 @@ #include <vector> #include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/sequence_checker.h" #include 
"api/task_queue/pending_task_safety_flag.h" -#include "api/task_queue/task_queue_factory.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/recordable_encoded_frame.h" @@ -31,9 +32,7 @@ #include "modules/video_coding/nack_requester.h" #include "modules/video_coding/video_receiver2.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/clock.h" #include "video/receive_statistics_proxy.h" #include "video/rtp_streams_synchronizer2.h" #include "video/rtp_video_stream_receiver2.h" @@ -94,17 +93,15 @@ class VideoReceiveStream2 // configured. static constexpr size_t kBufferedEncodedFramesMaxSize = 60; - VideoReceiveStream2(TaskQueueFactory* task_queue_factory, + VideoReceiveStream2(const Environment& env, Call* call, int num_cpu_cores, PacketRouter* packet_router, VideoReceiveStreamInterface::Config config, CallStats* call_stats, - Clock* clock, std::unique_ptr<VCMTiming> timing, NackPeriodicProcessor* nack_periodic_processor, - DecodeSynchronizer* decode_sync, - RtcEventLog* event_log); + DecodeSynchronizer* decode_sync); // Destruction happens on the worker thread. Prior to destruction the caller // must ensure that a registration with the transport has been cleared. See // `RegisterWithTransport` for details. 
@@ -227,7 +224,7 @@ class VideoReceiveStream2 DecodeFrameResult HandleEncodedFrameOnDecodeQueue( std::unique_ptr<EncodedFrame> frame, bool keyframe_request_is_due, - bool keyframe_required) RTC_RUN_ON(decode_queue_); + bool keyframe_required) RTC_RUN_ON(decode_sequence_checker_); void UpdatePlayoutDelays() const RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_sequence_checker_); void RequestKeyFrame(Timestamp now) RTC_RUN_ON(packet_sequence_checker_); @@ -239,10 +236,12 @@ class VideoReceiveStream2 bool IsReceivingKeyFrame(Timestamp timestamp) const RTC_RUN_ON(packet_sequence_checker_); int DecodeAndMaybeDispatchEncodedFrame(std::unique_ptr<EncodedFrame> frame) - RTC_RUN_ON(decode_queue_); + RTC_RUN_ON(decode_sequence_checker_); void UpdateHistograms(); + const Environment env_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; // TODO(bugs.webrtc.org/11993): This checker conceptually represents // operations that belong to the network thread. The Call class is currently @@ -253,18 +252,17 @@ class VideoReceiveStream2 // on the network thread, this comment will be deleted. RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_; - TaskQueueFactory* const task_queue_factory_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker decode_sequence_checker_; TransportAdapter transport_adapter_; const VideoReceiveStreamInterface::Config config_; const int num_cpu_cores_; Call* const call_; - Clock* const clock_; CallStats* const call_stats_; bool decoder_running_ RTC_GUARDED_BY(worker_sequence_checker_) = false; - bool decoder_stopped_ RTC_GUARDED_BY(decode_queue_) = true; + bool decoder_stopped_ RTC_GUARDED_BY(decode_sequence_checker_) = true; SourceTracker source_tracker_; ReceiveStatisticsProxy stats_proxy_; @@ -300,7 +298,7 @@ class VideoReceiveStream2 bool keyframe_required_ RTC_GUARDED_BY(packet_sequence_checker_) = true; // If we have successfully decoded any frame. 
- bool frame_decoded_ RTC_GUARDED_BY(decode_queue_) = false; + bool frame_decoded_ RTC_GUARDED_BY(decode_sequence_checker_) = false; absl::optional<Timestamp> last_keyframe_request_ RTC_GUARDED_BY(packet_sequence_checker_); @@ -329,7 +327,7 @@ class VideoReceiveStream2 // Function that is triggered with encoded frames, if not empty. std::function<void(const RecordableEncodedFrame&)> - encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_); + encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_sequence_checker_); // Set to true while we're requesting keyframes but not yet received one. bool keyframe_generation_requested_ RTC_GUARDED_BY(packet_sequence_checker_) = false; @@ -342,13 +340,16 @@ class VideoReceiveStream2 RTC_GUARDED_BY(pending_resolution_mutex_); // Buffered encoded frames held while waiting for decoded resolution. std::vector<std::unique_ptr<EncodedFrame>> buffered_encoded_frames_ - RTC_GUARDED_BY(decode_queue_); - - // Defined last so they are destroyed before all other members. - rtc::TaskQueue decode_queue_; + RTC_GUARDED_BY(decode_sequence_checker_); // Used to signal destruction to potentially pending tasks. ScopedTaskSafety task_safety_; + + // Defined last so they are destroyed before all other members, in particular + // `decode_queue_` should be stopped before `decode_sequence_checker_` is + // destructed to avoid races when running tasks on the `decode_queue_` during + // VideoReceiveStream2 destruction. 
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> decode_queue_; }; } // namespace internal diff --git a/third_party/libwebrtc/video/video_receive_stream2_unittest.cc b/third_party/libwebrtc/video/video_receive_stream2_unittest.cc index 084b128af8..50e00aa31b 100644 --- a/third_party/libwebrtc/video/video_receive_stream2_unittest.cc +++ b/third_party/libwebrtc/video/video_receive_stream2_unittest.cc @@ -23,6 +23,8 @@ #include "absl/memory/memory.h" #include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/metronome/test/fake_metronome.h" #include "api/test/mock_video_decoder.h" #include "api/test/mock_video_decoder_factory.h" @@ -192,12 +194,13 @@ class VideoReceiveStream2Test : public ::testing::TestWithParam<bool> { VideoReceiveStream2Test() : time_controller_(kStartTime), - clock_(time_controller_.GetClock()), + env_(CreateEnvironment(time_controller_.CreateTaskQueueFactory(), + time_controller_.GetClock())), config_(&mock_transport_, &mock_h264_decoder_factory_), - call_stats_(clock_, time_controller_.GetMainThread()), + call_stats_(&env_.clock(), time_controller_.GetMainThread()), fake_renderer_(&time_controller_), fake_metronome_(TimeDelta::Millis(16)), - decode_sync_(clock_, + decode_sync_(&env_.clock(), &fake_metronome_, time_controller_.GetMainThread()), h264_decoder_factory_(&mock_decoder_) { @@ -255,13 +258,13 @@ class VideoReceiveStream2Test : public ::testing::TestWithParam<bool> { video_receive_stream_->UnregisterFromTransport(); video_receive_stream_ = nullptr; } - timing_ = new VCMTiming(clock_, fake_call_.trials()); + timing_ = new VCMTiming(&env_.clock(), env_.field_trials()); video_receive_stream_ = std::make_unique<webrtc::internal::VideoReceiveStream2>( - time_controller_.GetTaskQueueFactory(), &fake_call_, - kDefaultNumCpuCores, &packet_router_, config_.Copy(), &call_stats_, - clock_, absl::WrapUnique(timing_), &nack_periodic_processor_, - UseMetronome() ? 
&decode_sync_ : nullptr, nullptr); + env_, &fake_call_, kDefaultNumCpuCores, &packet_router_, + config_.Copy(), &call_stats_, absl::WrapUnique(timing_), + &nack_periodic_processor_, + UseMetronome() ? &decode_sync_ : nullptr); video_receive_stream_->RegisterWithTransport( &rtp_stream_receiver_controller_); if (state) @@ -270,7 +273,7 @@ class VideoReceiveStream2Test : public ::testing::TestWithParam<bool> { protected: GlobalSimulatedTimeController time_controller_; - Clock* const clock_; + Environment env_; NackPeriodicProcessor nack_periodic_processor_; testing::NiceMock<MockVideoDecoderFactory> mock_h264_decoder_factory_; VideoReceiveStreamInterface::Config config_; @@ -542,16 +545,16 @@ TEST_P(VideoReceiveStream2Test, RenderedFrameUpdatesGetSources) { info.set_csrcs({kCsrc}); info.set_rtp_timestamp(kRtpTimestamp); - info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(5000)); + info.set_receive_time(env_.clock().CurrentTime() - TimeDelta::Millis(5000)); infos.push_back(info); - info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(3000)); + info.set_receive_time(env_.clock().CurrentTime() - TimeDelta::Millis(3000)); infos.push_back(info); - info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(2000)); + info.set_receive_time(env_.clock().CurrentTime() - TimeDelta::Millis(2000)); infos.push_back(info); - info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(1000)); + info.set_receive_time(env_.clock().CurrentTime() - TimeDelta::Millis(1000)); infos.push_back(info); packet_infos = RtpPacketInfos(std::move(infos)); @@ -563,12 +566,12 @@ TEST_P(VideoReceiveStream2Test, RenderedFrameUpdatesGetSources) { EXPECT_THAT(video_receive_stream_->GetSources(), IsEmpty()); // Render one video frame. - Timestamp timestamp_min = clock_->CurrentTime(); + Timestamp timestamp_min = env_.clock().CurrentTime(); video_receive_stream_->OnCompleteFrame(std::move(test_frame)); // Verify that the per-packet information is passed to the renderer. 
EXPECT_THAT(fake_renderer_.WaitForFrame(kDefaultTimeOut), RenderedFrameWith(PacketInfos(ElementsAreArray(packet_infos)))); - Timestamp timestamp_max = clock_->CurrentTime(); + Timestamp timestamp_max = env_.clock().CurrentTime(); // Verify that the per-packet information also updates `GetSources()`. std::vector<RtpSource> sources = video_receive_stream_->GetSources(); @@ -813,15 +816,15 @@ TEST_P(VideoReceiveStream2Test, FramesScheduledInOrder) { EXPECT_CALL(mock_decoder_, Decode(test::RtpTimestamp(RtpTimestampForFrame(2)), _)) .Times(1); - key_frame->SetReceivedTime(clock_->CurrentTime().ms()); + key_frame->SetReceivedTime(env_.clock().CurrentTime().ms()); video_receive_stream_->OnCompleteFrame(std::move(key_frame)); EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Zero()), RenderedFrame()); - delta_frame2->SetReceivedTime(clock_->CurrentTime().ms()); + delta_frame2->SetReceivedTime(env_.clock().CurrentTime().ms()); video_receive_stream_->OnCompleteFrame(std::move(delta_frame2)); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay), DidNotReceiveFrame()); // `delta_frame1` arrives late. - delta_frame1->SetReceivedTime(clock_->CurrentTime().ms()); + delta_frame1->SetReceivedTime(env_.clock().CurrentTime().ms()); video_receive_stream_->OnCompleteFrame(std::move(delta_frame1)); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay), RenderedFrame()); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay * 2), RenderedFrame()); @@ -854,7 +857,7 @@ TEST_P(VideoReceiveStream2Test, WaitsforAllSpatialLayers) { // No decodes should be called until `sl2` is received. 
EXPECT_CALL(mock_decoder_, Decode(_, _)).Times(0); - sl0->SetReceivedTime(clock_->CurrentTime().ms()); + sl0->SetReceivedTime(env_.clock().CurrentTime().ms()); video_receive_stream_->OnCompleteFrame(std::move(sl0)); EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Zero()), DidNotReceiveFrame()); @@ -984,7 +987,7 @@ TEST_P(VideoReceiveStream2Test, RtpTimestampWrapAround) { .Id(0) .PayloadType(99) .Time(kBaseRtp) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .AsLast() .Build()); EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Zero()), RenderedFrame()); @@ -994,7 +997,7 @@ TEST_P(VideoReceiveStream2Test, RtpTimestampWrapAround) { .Id(1) .PayloadType(99) .Time(kBaseRtp + k30FpsRtpTimestampDelta) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .AsLast() .Build()); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay), RenderedFrame()); @@ -1014,7 +1017,7 @@ TEST_P(VideoReceiveStream2Test, RtpTimestampWrapAround) { .Id(2) .PayloadType(99) .Time(kWrapAroundRtp) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .AsLast() .Build()); EXPECT_CALL(mock_decoder_, Decode(test::RtpTimestamp(kWrapAroundRtp), _)) @@ -1067,10 +1070,11 @@ TEST_P(VideoReceiveStream2Test, PoorConnectionWithFpsChangeDuringLostFrame) { // 2 second of frames at 15 fps, and then a keyframe. 
time_controller_.AdvanceTime(k30FpsDelay); - Timestamp send_30fps_end_time = clock_->CurrentTime() + TimeDelta::Seconds(2); + Timestamp send_30fps_end_time = + env_.clock().CurrentTime() + TimeDelta::Seconds(2); int id = 3; EXPECT_CALL(mock_transport_, SendRtcp).Times(AnyNumber()); - while (clock_->CurrentTime() < send_30fps_end_time) { + while (env_.clock().CurrentTime() < send_30fps_end_time) { ++id; video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() @@ -1085,8 +1089,9 @@ TEST_P(VideoReceiveStream2Test, PoorConnectionWithFpsChangeDuringLostFrame) { Eq(absl::nullopt)); } uint32_t current_rtp = RtpTimestampForFrame(id); - Timestamp send_15fps_end_time = clock_->CurrentTime() + TimeDelta::Seconds(2); - while (clock_->CurrentTime() < send_15fps_end_time) { + Timestamp send_15fps_end_time = + env_.clock().CurrentTime() + TimeDelta::Seconds(2); + while (env_.clock().CurrentTime() < send_15fps_end_time) { ++id; current_rtp += k15FpsRtpTimestampDelta; video_receive_stream_->OnCompleteFrame( @@ -1094,7 +1099,7 @@ TEST_P(VideoReceiveStream2Test, PoorConnectionWithFpsChangeDuringLostFrame) { .Id(id) .PayloadType(99) .Time(current_rtp) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .Refs({id - 1}) .AsLast() .Build()); @@ -1112,7 +1117,7 @@ TEST_P(VideoReceiveStream2Test, PoorConnectionWithFpsChangeDuringLostFrame) { .Id(id) .PayloadType(99) .Time(current_rtp) - .ReceivedTime(clock_->CurrentTime() + kKeyframeDelay) + .ReceivedTime(env_.clock().CurrentTime() + kKeyframeDelay) .AsLast() .Build()); // If the framerate was not updated to be 15fps from the frames that arrived @@ -1166,7 +1171,7 @@ TEST_P(VideoReceiveStream2Test, StreamShouldNotTimeoutWhileWaitingForFrame) { .Id(121) .PayloadType(99) .Time(late_decode_rtp) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .AsLast() .Build()); EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Millis(100), diff --git 
a/third_party/libwebrtc/video/video_send_stream.cc b/third_party/libwebrtc/video/video_send_stream.cc deleted file mode 100644 index b99b08eefb..0000000000 --- a/third_party/libwebrtc/video/video_send_stream.cc +++ /dev/null @@ -1,344 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "video/video_send_stream.h" - -#include <utility> - -#include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" -#include "api/video/video_stream_encoder_settings.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" -#include "modules/rtp_rtcp/source/rtp_sender.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/clock.h" -#include "video/adaptation/overuse_frame_detector.h" -#include "video/frame_cadence_adapter.h" -#include "video/video_stream_encoder.h" - -namespace webrtc { - -namespace { - -size_t CalculateMaxHeaderSize(const RtpConfig& config) { - size_t header_size = kRtpHeaderSize; - size_t extensions_size = 0; - size_t fec_extensions_size = 0; - if (!config.extensions.empty()) { - RtpHeaderExtensionMap extensions_map(config.extensions); - extensions_size = RtpHeaderExtensionSize(RTPSender::VideoExtensionSizes(), - extensions_map); - fec_extensions_size = - RtpHeaderExtensionSize(RTPSender::FecExtensionSizes(), extensions_map); - } - header_size += extensions_size; - if (config.flexfec.payload_type >= 0) { - // All FEC extensions again plus maximum FlexFec overhead. 
- header_size += fec_extensions_size + 32; - } else { - if (config.ulpfec.ulpfec_payload_type >= 0) { - // Header with all the FEC extensions will be repeated plus maximum - // UlpFec overhead. - header_size += fec_extensions_size + 18; - } - if (config.ulpfec.red_payload_type >= 0) { - header_size += 1; // RED header. - } - } - // Additional room for Rtx. - if (config.rtx.payload_type >= 0) - header_size += kRtxHeaderSize; - return header_size; -} - -VideoStreamEncoder::BitrateAllocationCallbackType -GetBitrateAllocationCallbackType(const VideoSendStream::Config& config, - const FieldTrialsView& field_trials) { - if (webrtc::RtpExtension::FindHeaderExtensionByUri( - config.rtp.extensions, - webrtc::RtpExtension::kVideoLayersAllocationUri, - config.crypto_options.srtp.enable_encrypted_rtp_header_extensions - ? RtpExtension::Filter::kPreferEncryptedExtension - : RtpExtension::Filter::kDiscardEncryptedExtension)) { - return VideoStreamEncoder::BitrateAllocationCallbackType:: - kVideoLayersAllocation; - } - if (field_trials.IsEnabled("WebRTC-Target-Bitrate-Rtcp")) { - return VideoStreamEncoder::BitrateAllocationCallbackType:: - kVideoBitrateAllocation; - } - return VideoStreamEncoder::BitrateAllocationCallbackType:: - kVideoBitrateAllocationWhenScreenSharing; -} - -RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig( - const VideoSendStream::Config* config) { - RtpSenderFrameEncryptionConfig frame_encryption_config; - frame_encryption_config.frame_encryptor = config->frame_encryptor.get(); - frame_encryption_config.crypto_options = config->crypto_options; - return frame_encryption_config; -} - -RtpSenderObservers CreateObservers(RtcpRttStats* call_stats, - EncoderRtcpFeedback* encoder_feedback, - SendStatisticsProxy* stats_proxy, - SendPacketObserver* send_packet_observer) { - RtpSenderObservers observers; - observers.rtcp_rtt_stats = call_stats; - observers.intra_frame_callback = encoder_feedback; - observers.rtcp_loss_notification_observer = encoder_feedback; 
- observers.report_block_data_observer = stats_proxy; - observers.rtp_stats = stats_proxy; - observers.bitrate_observer = stats_proxy; - observers.frame_count_observer = stats_proxy; - observers.rtcp_type_observer = stats_proxy; - observers.send_packet_observer = send_packet_observer; - return observers; -} - -std::unique_ptr<VideoStreamEncoder> CreateVideoStreamEncoder( - Clock* clock, - int num_cpu_cores, - TaskQueueFactory* task_queue_factory, - SendStatisticsProxy* stats_proxy, - const VideoStreamEncoderSettings& encoder_settings, - VideoStreamEncoder::BitrateAllocationCallbackType - bitrate_allocation_callback_type, - const FieldTrialsView& field_trials, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { - std::unique_ptr<TaskQueueBase, TaskQueueDeleter> encoder_queue = - task_queue_factory->CreateTaskQueue("EncoderQueue", - TaskQueueFactory::Priority::NORMAL); - TaskQueueBase* encoder_queue_ptr = encoder_queue.get(); - return std::make_unique<VideoStreamEncoder>( - clock, num_cpu_cores, stats_proxy, encoder_settings, - std::make_unique<OveruseFrameDetector>(stats_proxy), - FrameCadenceAdapterInterface::Create(clock, encoder_queue_ptr, - field_trials), - std::move(encoder_queue), bitrate_allocation_callback_type, field_trials, - encoder_selector); -} - -} // namespace - -namespace internal { - -VideoSendStream::VideoSendStream( - Clock* clock, - int num_cpu_cores, - TaskQueueFactory* task_queue_factory, - TaskQueueBase* network_queue, - RtcpRttStats* call_stats, - RtpTransportControllerSendInterface* transport, - BitrateAllocatorInterface* bitrate_allocator, - SendDelayStats* send_delay_stats, - RtcEventLog* event_log, - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, - const std::map<uint32_t, RtpState>& suspended_ssrcs, - const std::map<uint32_t, RtpPayloadState>& suspended_payload_states, - std::unique_ptr<FecController> fec_controller, - const FieldTrialsView& field_trials) - : transport_(transport), - 
stats_proxy_(clock, config, encoder_config.content_type, field_trials), - send_packet_observer_(&stats_proxy_, send_delay_stats), - config_(std::move(config)), - content_type_(encoder_config.content_type), - video_stream_encoder_(CreateVideoStreamEncoder( - clock, - num_cpu_cores, - task_queue_factory, - &stats_proxy_, - config_.encoder_settings, - GetBitrateAllocationCallbackType(config_, field_trials), - field_trials, - config_.encoder_selector)), - encoder_feedback_( - clock, - config_.rtp.ssrcs, - video_stream_encoder_.get(), - [this](uint32_t ssrc, const std::vector<uint16_t>& seq_nums) { - return rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums); - }), - rtp_video_sender_(transport->CreateRtpVideoSender( - suspended_ssrcs, - suspended_payload_states, - config_.rtp, - config_.rtcp_report_interval_ms, - config_.send_transport, - CreateObservers(call_stats, - &encoder_feedback_, - &stats_proxy_, - &send_packet_observer_), - event_log, - std::move(fec_controller), - CreateFrameEncryptionConfig(&config_), - config_.frame_transformer)), - send_stream_(clock, - &stats_proxy_, - transport, - bitrate_allocator, - video_stream_encoder_.get(), - &config_, - encoder_config.max_bitrate_bps, - encoder_config.bitrate_priority, - encoder_config.content_type, - rtp_video_sender_, - field_trials) { - RTC_DCHECK(config_.encoder_settings.encoder_factory); - RTC_DCHECK(config_.encoder_settings.bitrate_allocator_factory); - - video_stream_encoder_->SetFecControllerOverride(rtp_video_sender_); - - ReconfigureVideoEncoder(std::move(encoder_config)); -} - -VideoSendStream::~VideoSendStream() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(!running_); - transport_->DestroyRtpVideoSender(rtp_video_sender_); -} - -void VideoSendStream::Start() { - const std::vector<bool> active_layers(config_.rtp.ssrcs.size(), true); - StartPerRtpStream(active_layers); -} - -void VideoSendStream::StartPerRtpStream(const std::vector<bool> active_layers) { - 
RTC_DCHECK_RUN_ON(&thread_checker_); - - // Keep our `running_` flag expected state in sync with active layers since - // the `send_stream_` will be implicitly stopped/started depending on the - // state of the layers. - bool running = false; - - rtc::StringBuilder active_layers_string; - active_layers_string << "{"; - for (size_t i = 0; i < active_layers.size(); ++i) { - if (active_layers[i]) { - running = true; - active_layers_string << "1"; - } else { - active_layers_string << "0"; - } - if (i < active_layers.size() - 1) { - active_layers_string << ", "; - } - } - active_layers_string << "}"; - RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str(); - send_stream_.StartPerRtpStream(active_layers); - running_ = running; -} - -void VideoSendStream::Stop() { - RTC_DCHECK_RUN_ON(&thread_checker_); - if (!running_) - return; - RTC_DLOG(LS_INFO) << "VideoSendStream::Stop"; - running_ = false; - send_stream_.Stop(); -} - -bool VideoSendStream::started() { - RTC_DCHECK_RUN_ON(&thread_checker_); - return running_; -} - -void VideoSendStream::AddAdaptationResource( - rtc::scoped_refptr<Resource> resource) { - RTC_DCHECK_RUN_ON(&thread_checker_); - video_stream_encoder_->AddAdaptationResource(resource); -} - -std::vector<rtc::scoped_refptr<Resource>> -VideoSendStream::GetAdaptationResources() { - RTC_DCHECK_RUN_ON(&thread_checker_); - return video_stream_encoder_->GetAdaptationResources(); -} - -void VideoSendStream::SetSource( - rtc::VideoSourceInterface<webrtc::VideoFrame>* source, - const DegradationPreference& degradation_preference) { - RTC_DCHECK_RUN_ON(&thread_checker_); - video_stream_encoder_->SetSource(source, degradation_preference); -} - -void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { - ReconfigureVideoEncoder(std::move(config), nullptr); -} - -void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config, - SetParametersCallback callback) { - RTC_DCHECK_RUN_ON(&thread_checker_); - 
RTC_DCHECK_EQ(content_type_, config.content_type); - RTC_LOG(LS_VERBOSE) << "Encoder config: " << config.ToString() - << " VideoSendStream config: " << config_.ToString(); - video_stream_encoder_->ConfigureEncoder( - std::move(config), - config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp), - std::move(callback)); -} - -VideoSendStream::Stats VideoSendStream::GetStats() { - RTC_DCHECK_RUN_ON(&thread_checker_); - return stats_proxy_.GetStats(); -} - -absl::optional<float> VideoSendStream::GetPacingFactorOverride() const { - return send_stream_.configured_pacing_factor(); -} - -void VideoSendStream::StopPermanentlyAndGetRtpStates( - VideoSendStream::RtpStateMap* rtp_state_map, - VideoSendStream::RtpPayloadStateMap* payload_state_map) { - RTC_DCHECK_RUN_ON(&thread_checker_); - video_stream_encoder_->Stop(); - - running_ = false; - // Always run these cleanup steps regardless of whether running_ was set - // or not. This will unregister callbacks before destruction. - // See `VideoSendStreamImpl::StopVideoSendStream` for more. - send_stream_.Stop(); - *rtp_state_map = send_stream_.GetRtpStates(); - *payload_state_map = send_stream_.GetRtpPayloadStates(); -} - -void VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { - RTC_DCHECK_RUN_ON(&thread_checker_); - send_stream_.DeliverRtcp(packet, length); -} - -void VideoSendStream::GenerateKeyFrame(const std::vector<std::string>& rids) { - RTC_DCHECK_RUN_ON(&thread_checker_); - // Map rids to layers. If rids is empty, generate a keyframe for all layers. 
- std::vector<VideoFrameType> next_frames(config_.rtp.ssrcs.size(), - VideoFrameType::kVideoFrameKey); - if (!config_.rtp.rids.empty() && !rids.empty()) { - std::fill(next_frames.begin(), next_frames.end(), - VideoFrameType::kVideoFrameDelta); - for (const auto& rid : rids) { - for (size_t i = 0; i < config_.rtp.rids.size(); i++) { - if (config_.rtp.rids[i] == rid) { - next_frames[i] = VideoFrameType::kVideoFrameKey; - break; - } - } - } - } - if (video_stream_encoder_) { - video_stream_encoder_->SendKeyFrame(next_frames); - } -} - -} // namespace internal -} // namespace webrtc diff --git a/third_party/libwebrtc/video/video_send_stream.h b/third_party/libwebrtc/video/video_send_stream.h deleted file mode 100644 index 4afafcf8e4..0000000000 --- a/third_party/libwebrtc/video/video_send_stream.h +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef VIDEO_VIDEO_SEND_STREAM_H_ -#define VIDEO_VIDEO_SEND_STREAM_H_ - -#include <map> -#include <memory> -#include <string> -#include <vector> - -#include "api/fec_controller.h" -#include "api/field_trials_view.h" -#include "api/sequence_checker.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "call/bitrate_allocator.h" -#include "call/video_receive_stream.h" -#include "call/video_send_stream.h" -#include "rtc_base/event.h" -#include "rtc_base/system/no_unique_address.h" -#include "video/encoder_rtcp_feedback.h" -#include "video/send_delay_stats.h" -#include "video/send_statistics_proxy.h" -#include "video/video_send_stream_impl.h" -#include "video/video_stream_encoder_interface.h" - -namespace webrtc { -namespace test { -class VideoSendStreamPeer; -} // namespace test - -class IvfFileWriter; -class RateLimiter; -class RtpRtcp; -class RtpTransportControllerSendInterface; -class RtcEventLog; - -namespace internal { - -class VideoSendStreamImpl; - -// VideoSendStream implements webrtc::VideoSendStream. -// Internally, it delegates all public methods to VideoSendStreamImpl and / or -// VideoStreamEncoder. 
-class VideoSendStream : public webrtc::VideoSendStream { - public: - using RtpStateMap = std::map<uint32_t, RtpState>; - using RtpPayloadStateMap = std::map<uint32_t, RtpPayloadState>; - - VideoSendStream( - Clock* clock, - int num_cpu_cores, - TaskQueueFactory* task_queue_factory, - TaskQueueBase* network_queue, - RtcpRttStats* call_stats, - RtpTransportControllerSendInterface* transport, - BitrateAllocatorInterface* bitrate_allocator, - SendDelayStats* send_delay_stats, - RtcEventLog* event_log, - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, - const std::map<uint32_t, RtpState>& suspended_ssrcs, - const std::map<uint32_t, RtpPayloadState>& suspended_payload_states, - std::unique_ptr<FecController> fec_controller, - const FieldTrialsView& field_trials); - - ~VideoSendStream() override; - - void DeliverRtcp(const uint8_t* packet, size_t length); - - // webrtc::VideoSendStream implementation. - void Start() override; - void StartPerRtpStream(std::vector<bool> active_layers) override; - void Stop() override; - bool started() override; - - void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override; - std::vector<rtc::scoped_refptr<Resource>> GetAdaptationResources() override; - - void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source, - const DegradationPreference& degradation_preference) override; - - void ReconfigureVideoEncoder(VideoEncoderConfig config) override; - void ReconfigureVideoEncoder(VideoEncoderConfig config, - SetParametersCallback callback) override; - Stats GetStats() override; - - void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map, - RtpPayloadStateMap* payload_state_map); - void GenerateKeyFrame(const std::vector<std::string>& rids) override; - - private: - friend class test::VideoSendStreamPeer; - class OnSendPacketObserver : public SendPacketObserver { - public: - OnSendPacketObserver(SendStatisticsProxy* stats_proxy, - SendDelayStats* send_delay_stats) - : 
stats_proxy_(*stats_proxy), send_delay_stats_(*send_delay_stats) {} - - void OnSendPacket(absl::optional<uint16_t> packet_id, - Timestamp capture_time, - uint32_t ssrc) override { - stats_proxy_.OnSendPacket(ssrc, capture_time); - if (packet_id.has_value()) { - send_delay_stats_.OnSendPacket(*packet_id, capture_time, ssrc); - } - } - - private: - SendStatisticsProxy& stats_proxy_; - SendDelayStats& send_delay_stats_; - }; - - absl::optional<float> GetPacingFactorOverride() const; - - RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; - RtpTransportControllerSendInterface* const transport_; - - SendStatisticsProxy stats_proxy_; - OnSendPacketObserver send_packet_observer_; - const VideoSendStream::Config config_; - const VideoEncoderConfig::ContentType content_type_; - std::unique_ptr<VideoStreamEncoderInterface> video_stream_encoder_; - EncoderRtcpFeedback encoder_feedback_; - RtpVideoSenderInterface* const rtp_video_sender_; - VideoSendStreamImpl send_stream_; - bool running_ RTC_GUARDED_BY(thread_checker_) = false; -}; - -} // namespace internal -} // namespace webrtc - -#endif // VIDEO_VIDEO_SEND_STREAM_H_ diff --git a/third_party/libwebrtc/video/video_send_stream_impl.cc b/third_party/libwebrtc/video/video_send_stream_impl.cc index ee023d9fec..23dbb7177f 100644 --- a/third_party/libwebrtc/video/video_send_stream_impl.cc +++ b/third_party/libwebrtc/video/video_send_stream_impl.cc @@ -13,20 +13,51 @@ #include <algorithm> #include <cstdint> +#include <map> +#include <memory> #include <string> #include <utility> +#include <vector> #include "absl/algorithm/container.h" +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/call/bitrate_allocation.h" #include "api/crypto/crypto_options.h" +#include "api/fec_controller.h" +#include "api/field_trials_view.h" +#include "api/metronome/metronome.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" 
#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_layers_allocation.h" +#include "api/video/video_source_interface.h" +#include "api/video/video_stream_encoder_settings.h" #include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "call/bitrate_allocator.h" +#include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/video_send_stream.h" #include "modules/pacing/pacing_controller.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" +#include "modules/rtp_rtcp/source/rtp_sender.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -34,9 +65,18 @@ #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" +#include "video/adaptation/overuse_frame_detector.h" +#include "video/config/video_encoder_config.h" +#include "video/encoder_rtcp_feedback.h" +#include "video/frame_cadence_adapter.h" +#include "video/send_delay_stats.h" +#include 
"video/send_statistics_proxy.h" +#include "video/video_stream_encoder.h" +#include "video/video_stream_encoder_interface.h" namespace webrtc { namespace internal { @@ -139,12 +179,15 @@ int CalculateMaxPadBitrateBps(const std::vector<VideoStream>& streams, } absl::optional<AlrExperimentSettings> GetAlrSettings( + const FieldTrialsView& field_trials, VideoEncoderConfig::ContentType content_type) { if (content_type == VideoEncoderConfig::ContentType::kScreen) { return AlrExperimentSettings::CreateFromFieldTrial( + field_trials, AlrExperimentSettings::kScreenshareProbingBweExperimentName); } return AlrExperimentSettings::CreateFromFieldTrial( + field_trials, AlrExperimentSettings::kStrictPacingAndProbingExperimentName); } @@ -171,7 +214,7 @@ absl::optional<float> GetConfiguredPacingFactor( return absl::nullopt; absl::optional<AlrExperimentSettings> alr_settings = - GetAlrSettings(content_type); + GetAlrSettings(field_trials, content_type); if (alr_settings) return alr_settings->pacing_factor; @@ -181,6 +224,19 @@ absl::optional<float> GetConfiguredPacingFactor( default_pacing_config.pacing_factor); } +int GetEncoderPriorityBitrate(std::string codec_name, + const FieldTrialsView& field_trials) { + int priority_bitrate = 0; + if (PayloadStringToCodecType(codec_name) == VideoCodecType::kVideoCodecAV1) { + webrtc::FieldTrialParameter<int> av1_priority_bitrate("bitrate", 0); + webrtc::ParseFieldTrial( + {&av1_priority_bitrate}, + field_trials.Lookup("WebRTC-AV1-OverridePriorityBitrate")); + priority_bitrate = av1_priority_bitrate; + } + return priority_bitrate; +} + uint32_t GetInitialEncoderMaxBitrate(int initial_encoder_max_bitrate) { if (initial_encoder_max_bitrate > 0) return rtc::dchecked_cast<uint32_t>(initial_encoder_max_bitrate); @@ -205,6 +261,107 @@ int GetDefaultMinVideoBitrateBps(VideoCodecType codec_type) { return kDefaultMinVideoBitrateBps; } +size_t CalculateMaxHeaderSize(const RtpConfig& config) { + size_t header_size = kRtpHeaderSize; + size_t 
extensions_size = 0; + size_t fec_extensions_size = 0; + if (!config.extensions.empty()) { + RtpHeaderExtensionMap extensions_map(config.extensions); + extensions_size = RtpHeaderExtensionSize(RTPSender::VideoExtensionSizes(), + extensions_map); + fec_extensions_size = + RtpHeaderExtensionSize(RTPSender::FecExtensionSizes(), extensions_map); + } + header_size += extensions_size; + if (config.flexfec.payload_type >= 0) { + // All FEC extensions again plus maximum FlexFec overhead. + header_size += fec_extensions_size + 32; + } else { + if (config.ulpfec.ulpfec_payload_type >= 0) { + // Header with all the FEC extensions will be repeated plus maximum + // UlpFec overhead. + header_size += fec_extensions_size + 18; + } + if (config.ulpfec.red_payload_type >= 0) { + header_size += 1; // RED header. + } + } + // Additional room for Rtx. + if (config.rtx.payload_type >= 0) + header_size += kRtxHeaderSize; + return header_size; +} + +VideoStreamEncoder::BitrateAllocationCallbackType +GetBitrateAllocationCallbackType(const VideoSendStream::Config& config, + const FieldTrialsView& field_trials) { + if (webrtc::RtpExtension::FindHeaderExtensionByUri( + config.rtp.extensions, + webrtc::RtpExtension::kVideoLayersAllocationUri, + config.crypto_options.srtp.enable_encrypted_rtp_header_extensions + ? 
RtpExtension::Filter::kPreferEncryptedExtension + : RtpExtension::Filter::kDiscardEncryptedExtension)) { + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoLayersAllocation; + } + if (field_trials.IsEnabled("WebRTC-Target-Bitrate-Rtcp")) { + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoBitrateAllocation; + } + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoBitrateAllocationWhenScreenSharing; +} + +RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig( + const VideoSendStream::Config* config) { + RtpSenderFrameEncryptionConfig frame_encryption_config; + frame_encryption_config.frame_encryptor = config->frame_encryptor.get(); + frame_encryption_config.crypto_options = config->crypto_options; + return frame_encryption_config; +} + +RtpSenderObservers CreateObservers(RtcpRttStats* call_stats, + EncoderRtcpFeedback* encoder_feedback, + SendStatisticsProxy* stats_proxy, + SendPacketObserver* send_packet_observer) { + RtpSenderObservers observers; + observers.rtcp_rtt_stats = call_stats; + observers.intra_frame_callback = encoder_feedback; + observers.rtcp_loss_notification_observer = encoder_feedback; + observers.report_block_data_observer = stats_proxy; + observers.rtp_stats = stats_proxy; + observers.bitrate_observer = stats_proxy; + observers.frame_count_observer = stats_proxy; + observers.rtcp_type_observer = stats_proxy; + observers.send_packet_observer = send_packet_observer; + return observers; +} + +std::unique_ptr<VideoStreamEncoderInterface> CreateVideoStreamEncoder( + Clock* clock, + int num_cpu_cores, + TaskQueueFactory* task_queue_factory, + SendStatisticsProxy* stats_proxy, + const VideoStreamEncoderSettings& encoder_settings, + VideoStreamEncoder::BitrateAllocationCallbackType + bitrate_allocation_callback_type, + const FieldTrialsView& field_trials, + Metronome* metronome, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + std::unique_ptr<TaskQueueBase, 
TaskQueueDeleter> encoder_queue = + task_queue_factory->CreateTaskQueue("EncoderQueue", + TaskQueueFactory::Priority::NORMAL); + TaskQueueBase* encoder_queue_ptr = encoder_queue.get(); + return std::make_unique<VideoStreamEncoder>( + clock, num_cpu_cores, stats_proxy, encoder_settings, + std::make_unique<OveruseFrameDetector>(stats_proxy), + FrameCadenceAdapterInterface::Create( + clock, encoder_queue_ptr, metronome, + /*worker_queue=*/TaskQueueBase::Current(), field_trials), + std::move(encoder_queue), bitrate_allocation_callback_type, field_trials, + encoder_selector); +} + } // namespace PacingConfig::PacingConfig(const FieldTrialsView& field_trials) @@ -218,58 +375,90 @@ PacingConfig::~PacingConfig() = default; VideoSendStreamImpl::VideoSendStreamImpl( Clock* clock, - SendStatisticsProxy* stats_proxy, + int num_cpu_cores, + TaskQueueFactory* task_queue_factory, + RtcpRttStats* call_stats, RtpTransportControllerSendInterface* transport, + Metronome* metronome, BitrateAllocatorInterface* bitrate_allocator, - VideoStreamEncoderInterface* video_stream_encoder, - const VideoSendStream::Config* config, - int initial_encoder_max_bitrate, - double initial_encoder_bitrate_priority, - VideoEncoderConfig::ContentType content_type, - RtpVideoSenderInterface* rtp_video_sender, - const FieldTrialsView& field_trials) - : clock_(clock), - has_alr_probing_(config->periodic_alr_bandwidth_probing || - GetAlrSettings(content_type)), + SendDelayStats* send_delay_stats, + RtcEventLog* event_log, + VideoSendStream::Config config, + VideoEncoderConfig encoder_config, + const std::map<uint32_t, RtpState>& suspended_ssrcs, + const std::map<uint32_t, RtpPayloadState>& suspended_payload_states, + std::unique_ptr<FecController> fec_controller, + const FieldTrialsView& field_trials, + std::unique_ptr<VideoStreamEncoderInterface> video_stream_encoder_for_test) + : transport_(transport), + stats_proxy_(clock, config, encoder_config.content_type, field_trials), + 
send_packet_observer_(&stats_proxy_, send_delay_stats), + config_(std::move(config)), + content_type_(encoder_config.content_type), + video_stream_encoder_( + video_stream_encoder_for_test + ? std::move(video_stream_encoder_for_test) + : CreateVideoStreamEncoder( + clock, + num_cpu_cores, + task_queue_factory, + &stats_proxy_, + config_.encoder_settings, + GetBitrateAllocationCallbackType(config_, field_trials), + field_trials, + metronome, + config_.encoder_selector)), + encoder_feedback_( + clock, + config_.rtp.ssrcs, + video_stream_encoder_.get(), + [this](uint32_t ssrc, const std::vector<uint16_t>& seq_nums) { + return rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums); + }), + rtp_video_sender_(transport->CreateRtpVideoSender( + suspended_ssrcs, + suspended_payload_states, + config_.rtp, + config_.rtcp_report_interval_ms, + config_.send_transport, + CreateObservers(call_stats, + &encoder_feedback_, + &stats_proxy_, + &send_packet_observer_), + event_log, + std::move(fec_controller), + CreateFrameEncryptionConfig(&config_), + config_.frame_transformer)), + clock_(clock), + has_alr_probing_( + config_.periodic_alr_bandwidth_probing || + GetAlrSettings(field_trials, encoder_config.content_type)), pacing_config_(PacingConfig(field_trials)), - stats_proxy_(stats_proxy), - config_(config), worker_queue_(TaskQueueBase::Current()), timed_out_(false), - transport_(transport), + bitrate_allocator_(bitrate_allocator), disable_padding_(true), max_padding_bitrate_(0), encoder_min_bitrate_bps_(0), encoder_max_bitrate_bps_( - GetInitialEncoderMaxBitrate(initial_encoder_max_bitrate)), + GetInitialEncoderMaxBitrate(encoder_config.max_bitrate_bps)), encoder_target_rate_bps_(0), - encoder_bitrate_priority_(initial_encoder_bitrate_priority), - video_stream_encoder_(video_stream_encoder), - rtp_video_sender_(rtp_video_sender), - configured_pacing_factor_(GetConfiguredPacingFactor(*config_, - content_type, + encoder_bitrate_priority_(encoder_config.bitrate_priority), + 
encoder_av1_priority_bitrate_override_bps_( + GetEncoderPriorityBitrate(config_.rtp.payload_name, field_trials)), + configured_pacing_factor_(GetConfiguredPacingFactor(config_, + content_type_, pacing_config_, field_trials)) { - RTC_DCHECK_GE(config_->rtp.payload_type, 0); - RTC_DCHECK_LE(config_->rtp.payload_type, 127); - RTC_DCHECK(!config_->rtp.ssrcs.empty()); + RTC_DCHECK_GE(config_.rtp.payload_type, 0); + RTC_DCHECK_LE(config_.rtp.payload_type, 127); + RTC_DCHECK(!config_.rtp.ssrcs.empty()); RTC_DCHECK(transport_); - RTC_DCHECK_NE(initial_encoder_max_bitrate, 0); - RTC_LOG(LS_INFO) << "VideoSendStreamImpl: " << config_->ToString(); + RTC_DCHECK_NE(encoder_max_bitrate_bps_, 0); + RTC_LOG(LS_INFO) << "VideoSendStreamImpl: " << config_.ToString(); - RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled()); - - // Only request rotation at the source when we positively know that the remote - // side doesn't support the rotation extension. This allows us to prepare the - // encoder in the expectation that rotation is supported - which is the common - // case. - bool rotation_applied = absl::c_none_of( - config_->rtp.extensions, [](const RtpExtension& extension) { - return extension.uri == RtpExtension::kVideoRotationUri; - }); - - video_stream_encoder_->SetSink(this, rotation_applied); + RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled(field_trials)); absl::optional<bool> enable_alr_bw_probing; @@ -277,7 +466,7 @@ VideoSendStreamImpl::VideoSendStreamImpl( // pacing settings. 
if (configured_pacing_factor_) { absl::optional<AlrExperimentSettings> alr_settings = - GetAlrSettings(content_type); + GetAlrSettings(field_trials, content_type_); int queue_time_limit_ms; if (alr_settings) { enable_alr_bw_probing = true; @@ -289,11 +478,11 @@ VideoSendStreamImpl::VideoSendStreamImpl( queue_time_limit_ms = pacing_config_.max_pacing_delay.Get().ms(); } - transport->SetQueueTimeLimit(queue_time_limit_ms); + transport_->SetQueueTimeLimit(queue_time_limit_ms); } - if (config_->periodic_alr_bandwidth_probing) { - enable_alr_bw_probing = config_->periodic_alr_bandwidth_probing; + if (config_.periodic_alr_bandwidth_probing) { + enable_alr_bw_probing = config_.periodic_alr_bandwidth_probing; } if (enable_alr_bw_probing) { @@ -303,13 +492,110 @@ VideoSendStreamImpl::VideoSendStreamImpl( if (configured_pacing_factor_) transport_->SetPacingFactor(*configured_pacing_factor_); + // Only request rotation at the source when we positively know that the remote + // side doesn't support the rotation extension. This allows us to prepare the + // encoder in the expectation that rotation is supported - which is the common + // case. 
+ bool rotation_applied = absl::c_none_of( + config_.rtp.extensions, [](const RtpExtension& extension) { + return extension.uri == RtpExtension::kVideoRotationUri; + }); + + video_stream_encoder_->SetSink(this, rotation_applied); video_stream_encoder_->SetStartBitrate( bitrate_allocator_->GetStartBitrate(this)); + video_stream_encoder_->SetFecControllerOverride(rtp_video_sender_); + ReconfigureVideoEncoder(std::move(encoder_config)); } VideoSendStreamImpl::~VideoSendStreamImpl() { RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_LOG(LS_INFO) << "~VideoSendStreamImpl: " << config_->ToString(); + RTC_LOG(LS_INFO) << "~VideoSendStreamImpl: " << config_.ToString(); + RTC_DCHECK(!started()); + transport_->DestroyRtpVideoSender(rtp_video_sender_); +} + +void VideoSendStreamImpl::AddAdaptationResource( + rtc::scoped_refptr<Resource> resource) { + RTC_DCHECK_RUN_ON(&thread_checker_); + video_stream_encoder_->AddAdaptationResource(resource); +} + +std::vector<rtc::scoped_refptr<Resource>> +VideoSendStreamImpl::GetAdaptationResources() { + RTC_DCHECK_RUN_ON(&thread_checker_); + return video_stream_encoder_->GetAdaptationResources(); +} + +void VideoSendStreamImpl::SetSource( + rtc::VideoSourceInterface<webrtc::VideoFrame>* source, + const DegradationPreference& degradation_preference) { + RTC_DCHECK_RUN_ON(&thread_checker_); + video_stream_encoder_->SetSource(source, degradation_preference); +} + +void VideoSendStreamImpl::ReconfigureVideoEncoder(VideoEncoderConfig config) { + ReconfigureVideoEncoder(std::move(config), nullptr); +} + +void VideoSendStreamImpl::ReconfigureVideoEncoder( + VideoEncoderConfig config, + SetParametersCallback callback) { + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_EQ(content_type_, config.content_type); + RTC_LOG(LS_VERBOSE) << "Encoder config: " << config.ToString() + << " VideoSendStream config: " << config_.ToString(); + video_stream_encoder_->ConfigureEncoder( + std::move(config), + config_.rtp.max_packet_size - 
CalculateMaxHeaderSize(config_.rtp), + std::move(callback)); +} + +VideoSendStream::Stats VideoSendStreamImpl::GetStats() { + RTC_DCHECK_RUN_ON(&thread_checker_); + return stats_proxy_.GetStats(); +} + +absl::optional<float> VideoSendStreamImpl::GetPacingFactorOverride() const { + return configured_pacing_factor_; +} + +void VideoSendStreamImpl::StopPermanentlyAndGetRtpStates( + VideoSendStreamImpl::RtpStateMap* rtp_state_map, + VideoSendStreamImpl::RtpPayloadStateMap* payload_state_map) { + RTC_DCHECK_RUN_ON(&thread_checker_); + video_stream_encoder_->Stop(); + + running_ = false; + // Always run these cleanup steps regardless of whether running_ was set + // or not. This will unregister callbacks before destruction. + // See `VideoSendStreamImpl::StopVideoSendStream` for more. + Stop(); + *rtp_state_map = GetRtpStates(); + *payload_state_map = GetRtpPayloadStates(); +} + +void VideoSendStreamImpl::GenerateKeyFrame( + const std::vector<std::string>& rids) { + RTC_DCHECK_RUN_ON(&thread_checker_); + // Map rids to layers. If rids is empty, generate a keyframe for all layers. 
+ std::vector<VideoFrameType> next_frames(config_.rtp.ssrcs.size(), + VideoFrameType::kVideoFrameKey); + if (!config_.rtp.rids.empty() && !rids.empty()) { + std::fill(next_frames.begin(), next_frames.end(), + VideoFrameType::kVideoFrameDelta); + for (const auto& rid : rids) { + for (size_t i = 0; i < config_.rtp.rids.size(); i++) { + if (config_.rtp.rids[i] == rid) { + next_frames[i] = VideoFrameType::kVideoFrameKey; + break; + } + } + } + } + if (video_stream_encoder_) { + video_stream_encoder_->SendKeyFrame(next_frames); + } } void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { @@ -317,9 +603,35 @@ void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { rtp_video_sender_->DeliverRtcp(packet, length); } +bool VideoSendStreamImpl::started() { + RTC_DCHECK_RUN_ON(&thread_checker_); + return rtp_video_sender_->IsActive(); +} + +void VideoSendStreamImpl::Start() { + const std::vector<bool> active_layers(config_.rtp.ssrcs.size(), true); + StartPerRtpStream(active_layers); +} + void VideoSendStreamImpl::StartPerRtpStream( const std::vector<bool> active_layers) { RTC_DCHECK_RUN_ON(&thread_checker_); + + rtc::StringBuilder active_layers_string; + active_layers_string << "{"; + for (size_t i = 0; i < active_layers.size(); ++i) { + if (active_layers[i]) { + active_layers_string << "1"; + } else { + active_layers_string << "0"; + } + if (i < active_layers.size() - 1) { + active_layers_string << ", "; + } + } + active_layers_string << "}"; + RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str(); + bool previously_active = rtp_video_sender_->IsActive(); rtp_video_sender_->SetActiveModules(active_layers); if (!rtp_video_sender_->IsActive() && previously_active) { @@ -377,7 +689,7 @@ void VideoSendStreamImpl::StopVideoSendStream() { check_encoder_activity_task_.Stop(); video_stream_encoder_->OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 0, 0); - stats_proxy_->OnSetEncoderTargetRate(0); 
+ stats_proxy_.OnSetEncoderTargetRate(0); } void VideoSendStreamImpl::SignalEncoderTimedOut() { @@ -460,8 +772,8 @@ MediaStreamAllocationConfig VideoSendStreamImpl::GetAllocationConfig() const { static_cast<uint32_t>(encoder_min_bitrate_bps_), encoder_max_bitrate_bps_, static_cast<uint32_t>(disable_padding_ ? 0 : max_padding_bitrate_), - /* priority_bitrate */ 0, - !config_->suspend_below_min_bitrate, + encoder_av1_priority_bitrate_override_bps_, + !config_.suspend_below_min_bitrate, encoder_bitrate_priority_}; } @@ -474,12 +786,12 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( RTC_DCHECK(!worker_queue_->IsCurrent()); auto closure = [this, streams = std::move(streams), is_svc, content_type, min_transmit_bitrate_bps]() mutable { - RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size()); + RTC_DCHECK_GE(config_.rtp.ssrcs.size(), streams.size()); TRACE_EVENT0("webrtc", "VideoSendStream::OnEncoderConfigurationChanged"); RTC_DCHECK_RUN_ON(&thread_checker_); const VideoCodecType codec_type = - PayloadStringToCodecType(config_->rtp.payload_name); + PayloadStringToCodecType(config_.rtp.payload_name); const absl::optional<DataRate> experimental_min_bitrate = GetExperimentalMinVideoBitrate(codec_type); @@ -508,11 +820,11 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( // TODO(bugs.webrtc.org/10266): Query the VideoBitrateAllocator instead. max_padding_bitrate_ = CalculateMaxPadBitrateBps( streams, is_svc, content_type, min_transmit_bitrate_bps, - config_->suspend_below_min_bitrate, has_alr_probing_); + config_.suspend_below_min_bitrate, has_alr_probing_); // Clear stats for disabled layers. 
- for (size_t i = streams.size(); i < config_->rtp.ssrcs.size(); ++i) { - stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]); + for (size_t i = streams.size(); i < config_.rtp.ssrcs.size(); ++i) { + stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]); } const size_t num_temporal_layers = @@ -588,7 +900,7 @@ uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) { update.stable_target_bitrate = update.target_bitrate; } - rtp_video_sender_->OnBitrateUpdated(update, stats_proxy_->GetSendFrameRate()); + rtp_video_sender_->OnBitrateUpdated(update, stats_proxy_.GetSendFrameRate()); encoder_target_rate_bps_ = rtp_video_sender_->GetPayloadBitrateBps(); const uint32_t protection_bitrate_bps = rtp_video_sender_->GetProtectionBitrateBps(); @@ -619,7 +931,7 @@ uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) { encoder_target_rate, encoder_stable_target_rate, link_allocation, rtc::dchecked_cast<uint8_t>(update.packet_loss_ratio * 256), update.round_trip_time.ms(), update.cwnd_reduce_ratio); - stats_proxy_->OnSetEncoderTargetRate(encoder_target_rate_bps_); + stats_proxy_.OnSetEncoderTargetRate(encoder_target_rate_bps_); return protection_bitrate_bps; } diff --git a/third_party/libwebrtc/video/video_send_stream_impl.h b/third_party/libwebrtc/video/video_send_stream_impl.h index c5e0980f6d..758e12c095 100644 --- a/third_party/libwebrtc/video/video_send_stream_impl.h +++ b/third_party/libwebrtc/video/video_send_stream_impl.h @@ -16,21 +16,22 @@ #include <atomic> #include <map> #include <memory> +#include <string> #include <vector> #include "absl/types/optional.h" #include "api/field_trials_view.h" +#include "api/metronome/metronome.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/video/encoded_image.h" #include "api/video/video_bitrate_allocation.h" -#include "api/video/video_bitrate_allocator.h" #include "api/video_codecs/video_encoder.h" #include 
"call/bitrate_allocator.h" #include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender_interface.h" -#include "modules/include/module_common_types.h" +#include "call/video_send_stream.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -38,10 +39,17 @@ #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "video/config/video_encoder_config.h" +#include "video/encoder_rtcp_feedback.h" +#include "video/send_delay_stats.h" #include "video/send_statistics_proxy.h" #include "video/video_stream_encoder_interface.h" namespace webrtc { + +namespace test { +class VideoSendStreamPeer; +} // namespace test + namespace internal { // Pacing buffer config; overridden by ALR config if provided. @@ -54,32 +62,58 @@ struct PacingConfig { FieldTrialParameter<TimeDelta> max_pacing_delay; }; -// VideoSendStreamImpl implements internal::VideoSendStream. -// It is created and destroyed on `rtp_transport_queue`. The intent is to -// decrease the need for locking and to ensure methods are called in sequence. -// Public methods except `DeliverRtcp` must be called on `rtp_transport_queue`. -// DeliverRtcp is called on the libjingle worker thread or a network thread. +// VideoSendStreamImpl implements webrtc::VideoSendStream. +// It is created and destroyed on `worker queue`. The intent is to // An encoder may deliver frames through the EncodedImageCallback on an // arbitrary thread. 
-class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, +class VideoSendStreamImpl : public webrtc::VideoSendStream, + public webrtc::BitrateAllocatorObserver, public VideoStreamEncoderInterface::EncoderSink { public: + using RtpStateMap = std::map<uint32_t, RtpState>; + using RtpPayloadStateMap = std::map<uint32_t, RtpPayloadState>; + VideoSendStreamImpl(Clock* clock, - SendStatisticsProxy* stats_proxy, + int num_cpu_cores, + TaskQueueFactory* task_queue_factory, + RtcpRttStats* call_stats, RtpTransportControllerSendInterface* transport, + Metronome* metronome, BitrateAllocatorInterface* bitrate_allocator, - VideoStreamEncoderInterface* video_stream_encoder, - const VideoSendStream::Config* config, - int initial_encoder_max_bitrate, - double initial_encoder_bitrate_priority, - VideoEncoderConfig::ContentType content_type, - RtpVideoSenderInterface* rtp_video_sender, - const FieldTrialsView& field_trials); + SendDelayStats* send_delay_stats, + RtcEventLog* event_log, + VideoSendStream::Config config, + VideoEncoderConfig encoder_config, + const RtpStateMap& suspended_ssrcs, + const RtpPayloadStateMap& suspended_payload_states, + std::unique_ptr<FecController> fec_controller, + const FieldTrialsView& field_trials, + std::unique_ptr<VideoStreamEncoderInterface> + video_stream_encoder_for_test = nullptr); ~VideoSendStreamImpl() override; void DeliverRtcp(const uint8_t* packet, size_t length); - void StartPerRtpStream(std::vector<bool> active_layers); - void Stop(); + + // webrtc::VideoSendStream implementation. 
+ void Start() override; + void StartPerRtpStream(std::vector<bool> active_layers) override; + void Stop() override; + bool started() override; + + void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override; + std::vector<rtc::scoped_refptr<Resource>> GetAdaptationResources() override; + + void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source, + const DegradationPreference& degradation_preference) override; + + void ReconfigureVideoEncoder(VideoEncoderConfig config) override; + void ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) override; + Stats GetStats() override; + + void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map, + RtpPayloadStateMap* payload_state_map); + void GenerateKeyFrame(const std::vector<std::string>& rids) override; // TODO(holmer): Move these to RtpTransportControllerSend. std::map<uint32_t, RtpState> GetRtpStates() const; @@ -91,6 +125,28 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, } private: + friend class test::VideoSendStreamPeer; + class OnSendPacketObserver : public SendPacketObserver { + public: + OnSendPacketObserver(SendStatisticsProxy* stats_proxy, + SendDelayStats* send_delay_stats) + : stats_proxy_(*stats_proxy), send_delay_stats_(*send_delay_stats) {} + + void OnSendPacket(absl::optional<uint16_t> packet_id, + Timestamp capture_time, + uint32_t ssrc) override { + stats_proxy_.OnSendPacket(ssrc, capture_time); + if (packet_id.has_value()) { + send_delay_stats_.OnSendPacket(*packet_id, capture_time, ssrc); + } + } + + private: + SendStatisticsProxy& stats_proxy_; + SendDelayStats& send_delay_stats_; + }; + + absl::optional<float> GetPacingFactorOverride() const; // Implements BitrateAllocatorObserver. 
uint32_t OnBitrateUpdated(BitrateAllocationUpdate update) override; @@ -130,13 +186,22 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, RTC_RUN_ON(thread_checker_); RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; + + RtpTransportControllerSendInterface* const transport_; + + SendStatisticsProxy stats_proxy_; + OnSendPacketObserver send_packet_observer_; + const VideoSendStream::Config config_; + const VideoEncoderConfig::ContentType content_type_; + std::unique_ptr<VideoStreamEncoderInterface> video_stream_encoder_; + EncoderRtcpFeedback encoder_feedback_; + RtpVideoSenderInterface* const rtp_video_sender_; + bool running_ RTC_GUARDED_BY(thread_checker_) = false; + Clock* const clock_; const bool has_alr_probing_; const PacingConfig pacing_config_; - SendStatisticsProxy* const stats_proxy_; - const VideoSendStream::Config* const config_; - TaskQueueBase* const worker_queue_; RepeatingTaskHandle check_encoder_activity_task_ @@ -145,7 +210,6 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, std::atomic_bool activity_; bool timed_out_ RTC_GUARDED_BY(thread_checker_); - RtpTransportControllerSendInterface* const transport_; BitrateAllocatorInterface* const bitrate_allocator_; bool disable_padding_ RTC_GUARDED_BY(thread_checker_); @@ -154,9 +218,8 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, uint32_t encoder_max_bitrate_bps_ RTC_GUARDED_BY(thread_checker_); uint32_t encoder_target_rate_bps_ RTC_GUARDED_BY(thread_checker_); double encoder_bitrate_priority_ RTC_GUARDED_BY(thread_checker_); - - VideoStreamEncoderInterface* const video_stream_encoder_; - RtpVideoSenderInterface* const rtp_video_sender_; + const int encoder_av1_priority_bitrate_override_bps_ + RTC_GUARDED_BY(thread_checker_); ScopedTaskSafety worker_queue_safety_; diff --git a/third_party/libwebrtc/video/video_send_stream_impl_unittest.cc b/third_party/libwebrtc/video/video_send_stream_impl_unittest.cc index c88ad06cfb..ba492ae66f 
100644 --- a/third_party/libwebrtc/video/video_send_stream_impl_unittest.cc +++ b/third_party/libwebrtc/video/video_send_stream_impl_unittest.cc @@ -11,31 +11,50 @@ #include "video/video_send_stream_impl.h" #include <algorithm> +#include <cstddef> +#include <cstdint> +#include <map> #include <memory> #include <string> +#include <utility> +#include <vector> #include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/call/bitrate_allocation.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "api/sequence_checker.h" +#include "api/rtp_parameters.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "call/rtp_video_sender.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_layers_allocation.h" +#include "api/video_codecs/video_encoder.h" +#include "call/bitrate_allocator.h" +#include "call/rtp_config.h" +#include "call/rtp_video_sender_interface.h" #include "call/test/mock_bitrate_allocator.h" #include "call/test/mock_rtp_transport_controller_send.h" +#include "call/video_send_stream.h" +#include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" -#include "modules/video_coding/fec_controller_default.h" -#include "rtc_base/event.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/experiments/alr_experiment.h" -#include "rtc_base/fake_clock.h" -#include "rtc_base/logging.h" +#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_transport.h" #include "test/scoped_key_value_config.h" #include "test/time_controller/simulated_time_controller.h" +#include "video/config/video_encoder_config.h" +#include "video/send_delay_stats.h" +#include 
"video/send_statistics_proxy.h" #include "video/test/mock_video_stream_encoder.h" -#include "video/video_send_stream.h" +#include "video/video_stream_encoder_interface.h" namespace webrtc { @@ -114,6 +133,7 @@ BitrateAllocationUpdate CreateAllocation(int bitrate_bps) { update.round_trip_time = TimeDelta::Zero(); return update; } + } // namespace class VideoSendStreamImplTest : public ::testing::Test { @@ -155,17 +175,35 @@ class VideoSendStreamImplTest : public ::testing::Test { std::unique_ptr<VideoSendStreamImpl> CreateVideoSendStreamImpl( int initial_encoder_max_bitrate, double initial_encoder_bitrate_priority, - VideoEncoderConfig::ContentType content_type) { + VideoEncoderConfig::ContentType content_type, + absl::optional<std::string> codec = std::nullopt) { EXPECT_CALL(bitrate_allocator_, GetStartBitrate(_)) .WillOnce(Return(123000)); + VideoEncoderConfig encoder_config; + encoder_config.max_bitrate_bps = initial_encoder_max_bitrate; + encoder_config.bitrate_priority = initial_encoder_bitrate_priority; + encoder_config.content_type = content_type; + if (codec) { + config_.rtp.payload_name = *codec; + } + std::map<uint32_t, RtpState> suspended_ssrcs; std::map<uint32_t, RtpPayloadState> suspended_payload_states; + + std::unique_ptr<NiceMock<MockVideoStreamEncoder>> video_stream_encoder = + std::make_unique<NiceMock<MockVideoStreamEncoder>>(); + video_stream_encoder_ = video_stream_encoder.get(); + auto ret = std::make_unique<VideoSendStreamImpl>( - time_controller_.GetClock(), &stats_proxy_, &transport_controller_, - &bitrate_allocator_, &video_stream_encoder_, &config_, - initial_encoder_max_bitrate, initial_encoder_bitrate_priority, - content_type, &rtp_video_sender_, field_trials_); + time_controller_.GetClock(), + /*num_cpu_cores=*/1, time_controller_.GetTaskQueueFactory(), + /*call_stats=*/nullptr, &transport_controller_, + /*metronome=*/nullptr, &bitrate_allocator_, &send_delay_stats_, + /*event_log=*/nullptr, config_.Copy(), encoder_config.Copy(), + 
suspended_ssrcs, suspended_payload_states, + /*fec_controller=*/nullptr, field_trials_, + std::move(video_stream_encoder)); // The call to GetStartBitrate() executes asynchronously on the tq. // Ensure all tasks get to run. @@ -181,7 +219,7 @@ class VideoSendStreamImplTest : public ::testing::Test { NiceMock<MockTransport> transport_; NiceMock<MockRtpTransportControllerSend> transport_controller_; NiceMock<MockBitrateAllocator> bitrate_allocator_; - NiceMock<MockVideoStreamEncoder> video_stream_encoder_; + NiceMock<MockVideoStreamEncoder>* video_stream_encoder_ = nullptr; NiceMock<MockRtpVideoSender> rtp_video_sender_; std::vector<bool> active_modules_; @@ -218,6 +256,9 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) { config_.suspend_below_min_bitrate = kSuspend; config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri, 1); + config_.rtp.ssrcs.emplace_back(1); + config_.rtp.ssrcs.emplace_back(2); + auto vss_impl = CreateVideoSendStreamImpl( kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kRealtimeVideo); @@ -248,9 +289,6 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) { int min_transmit_bitrate_bps = 30000; - config_.rtp.ssrcs.emplace_back(1); - config_.rtp.ssrcs.emplace_back(2); - EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)) .WillRepeatedly(Invoke( [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { @@ -284,6 +322,9 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) { config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri, 1); config_.periodic_alr_bandwidth_probing = true; + config_.rtp.ssrcs.emplace_back(1); + config_.rtp.ssrcs.emplace_back(2); + auto vss_impl = CreateVideoSendStreamImpl( kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); @@ -316,9 +357,6 @@ TEST_F(VideoSendStreamImplTest, 
UpdatesObserverOnConfigurationChangeWithAlr) { // low_stream.target_bitrate_bps + high_stream.min_bitrate_bps. int min_transmit_bitrate_bps = 400000; - config_.rtp.ssrcs.emplace_back(1); - config_.rtp.ssrcs.emplace_back(2); - EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)) .WillRepeatedly(Invoke( [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { @@ -347,6 +385,8 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithSimulcastVideoHysteresis) { test::ScopedKeyValueConfig hysteresis_experiment( field_trials_, "WebRTC-VideoRateControl/video_hysteresis:1.25/"); + config_.rtp.ssrcs.emplace_back(1); + config_.rtp.ssrcs.emplace_back(2); auto vss_impl = CreateVideoSendStreamImpl( kDefaultInitialBitrateBps, kDefaultBitratePriority, @@ -374,9 +414,6 @@ TEST_F(VideoSendStreamImplTest, high_stream.max_qp = 56; high_stream.bitrate_priority = 1; - config_.rtp.ssrcs.emplace_back(1); - config_.rtp.ssrcs.emplace_back(2); - EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)) .WillRepeatedly(Invoke([&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { @@ -397,7 +434,8 @@ TEST_F(VideoSendStreamImplTest, ->OnEncoderConfigurationChanged( std::vector<VideoStream>{low_stream, high_stream}, false, VideoEncoderConfig::ContentType::kRealtimeVideo, - /*min_transmit_bitrate_bps=*/0); + /*min_transmit_bitrate_bps=*/ + 0); }); time_controller_.AdvanceTime(TimeDelta::Zero()); vss_impl->Stop(); @@ -681,6 +719,76 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { vss_impl->Stop(); } +TEST_F(VideoSendStreamImplTest, PriorityBitrateConfigInactiveByDefault) { + auto vss_impl = CreateVideoSendStreamImpl( + kDefaultInitialBitrateBps, kDefaultBitratePriority, + VideoEncoderConfig::ContentType::kRealtimeVideo); + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 0))); + vss_impl->StartPerRtpStream({true}); + 
EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, PriorityBitrateConfigAffectsAV1) { + test::ScopedFieldTrials override_priority_bitrate( + "WebRTC-AV1-OverridePriorityBitrate/bitrate:20000/"); + auto vss_impl = CreateVideoSendStreamImpl( + kDefaultInitialBitrateBps, kDefaultBitratePriority, + VideoEncoderConfig::ContentType::kRealtimeVideo, "AV1"); + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 20000))); + vss_impl->StartPerRtpStream({true}); + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, + PriorityBitrateConfigSurvivesConfigurationChange) { + VideoStream qvga_stream; + qvga_stream.width = 320; + qvga_stream.height = 180; + qvga_stream.max_framerate = 30; + qvga_stream.min_bitrate_bps = 30000; + qvga_stream.target_bitrate_bps = 150000; + qvga_stream.max_bitrate_bps = 200000; + qvga_stream.max_qp = 56; + qvga_stream.bitrate_priority = 1; + + int min_transmit_bitrate_bps = 30000; + + test::ScopedFieldTrials override_priority_bitrate( + "WebRTC-AV1-OverridePriorityBitrate/bitrate:20000/"); + auto vss_impl = CreateVideoSendStreamImpl( + kDefaultInitialBitrateBps, kDefaultBitratePriority, + VideoEncoderConfig::ContentType::kRealtimeVideo, "AV1"); + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 20000))) + .Times(2); + vss_impl->StartPerRtpStream({true}); + + encoder_queue_->PostTask([&] { + static_cast<VideoStreamEncoderInterface::EncoderSink*>(vss_impl.get()) + ->OnEncoderConfigurationChanged( + std::vector<VideoStream>{qvga_stream}, false, + VideoEncoderConfig::ContentType::kRealtimeVideo, + min_transmit_bitrate_bps); + }); + time_controller_.AdvanceTime(TimeDelta::Zero()); + + EXPECT_CALL(bitrate_allocator_, 
RemoveObserver(vss_impl.get())).Times(1); + vss_impl->Stop(); +} + TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { const bool kSuspend = false; config_.suspend_below_min_bitrate = kSuspend; @@ -723,7 +831,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .WillOnce(Return(network_constrained_rate.bps())); EXPECT_CALL( - video_stream_encoder_, + *video_stream_encoder_, OnBitrateUpdated(network_constrained_rate, network_constrained_rate, network_constrained_rate, 0, _, 0)); static_cast<BitrateAllocatorObserver*>(vss_impl.get()) @@ -740,7 +848,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { EXPECT_CALL(rtp_video_sender_, OnBitrateUpdated(update, _)); EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .WillOnce(Return(rate_with_headroom.bps())); - EXPECT_CALL(video_stream_encoder_, + EXPECT_CALL(*video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, rate_with_headroom, 0, _, 0)); static_cast<BitrateAllocatorObserver*>(vss_impl.get()) @@ -757,7 +865,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { .WillOnce(Return(rate_with_headroom.bps())); const DataRate headroom_minus_protection = rate_with_headroom - DataRate::BitsPerSec(protection_bitrate_bps); - EXPECT_CALL(video_stream_encoder_, + EXPECT_CALL(*video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, headroom_minus_protection, 0, _, 0)); static_cast<BitrateAllocatorObserver*>(vss_impl.get()) @@ -770,14 +878,14 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { EXPECT_CALL(rtp_video_sender_, OnBitrateUpdated(update, _)); EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .WillOnce(Return(rate_with_headroom.bps())); - EXPECT_CALL(video_stream_encoder_, + EXPECT_CALL(*video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, qvga_max_bitrate, 0, _, 0)); 
static_cast<BitrateAllocatorObserver*>(vss_impl.get()) ->OnBitrateUpdated(update); // Set rates to zero on stop. - EXPECT_CALL(video_stream_encoder_, + EXPECT_CALL(*video_stream_encoder_, OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 0, 0)); vss_impl->Stop(); diff --git a/third_party/libwebrtc/video/video_send_stream_tests.cc b/third_party/libwebrtc/video/video_send_stream_tests.cc index 3241740d95..37acd2dc49 100644 --- a/third_party/libwebrtc/video/video_send_stream_tests.cc +++ b/third_party/libwebrtc/video/video_send_stream_tests.cc @@ -75,7 +75,7 @@ #include "video/config/encoder_stream_factory.h" #include "video/send_statistics_proxy.h" #include "video/transport_adapter.h" -#include "video/video_send_stream.h" +#include "video/video_send_stream_impl.h" namespace webrtc { namespace test { @@ -83,13 +83,13 @@ class VideoSendStreamPeer { public: explicit VideoSendStreamPeer(webrtc::VideoSendStream* base_class_stream) : internal_stream_( - static_cast<internal::VideoSendStream*>(base_class_stream)) {} + static_cast<internal::VideoSendStreamImpl*>(base_class_stream)) {} absl::optional<float> GetPacingFactorOverride() const { return internal_stream_->GetPacingFactorOverride(); } private: - internal::VideoSendStream const* const internal_stream_; + internal::VideoSendStreamImpl const* const internal_stream_; }; } // namespace test diff --git a/third_party/libwebrtc/video/video_stream_encoder.cc b/third_party/libwebrtc/video/video_stream_encoder.cc index 669f165635..d74f440996 100644 --- a/third_party/libwebrtc/video/video_stream_encoder.cc +++ b/third_party/libwebrtc/video/video_stream_encoder.cc @@ -713,10 +713,10 @@ VideoStreamEncoder::VideoStreamEncoder( RTC_DCHECK_GE(number_of_cores, 1); frame_cadence_adapter_->Initialize(&cadence_callback_); - stream_resource_manager_.Initialize(encoder_queue_.Get()); + stream_resource_manager_.Initialize(encoder_queue_.get()); - encoder_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&encoder_queue_); 
+ encoder_queue_->PostTask([this] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); resource_adaptation_processor_ = std::make_unique<ResourceAdaptationProcessor>( @@ -742,6 +742,14 @@ VideoStreamEncoder::~VideoStreamEncoder() { RTC_DCHECK_RUN_ON(worker_queue_); RTC_DCHECK(!video_source_sink_controller_.HasSource()) << "Must call ::Stop() before destruction."; + + // The queue must be destroyed before its pointer is invalidated to avoid race + // between destructor and running task that check if function is called on the + // encoder_queue_. + // std::unique_ptr destructor does the same two operations in reverse order as + // it doesn't expect member would be used after its destruction has started. + encoder_queue_.get_deleter()(encoder_queue_.get()); + encoder_queue_.release(); } void VideoStreamEncoder::Stop() { @@ -750,8 +758,8 @@ void VideoStreamEncoder::Stop() { rtc::Event shutdown_event; absl::Cleanup shutdown = [&shutdown_event] { shutdown_event.Set(); }; - encoder_queue_.PostTask([this, shutdown = std::move(shutdown)] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, shutdown = std::move(shutdown)] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); if (resource_adaptation_processor_) { stream_resource_manager_.StopManagedResources(); for (auto* constraint : adaptation_constraints_) { @@ -779,8 +787,8 @@ void VideoStreamEncoder::Stop() { void VideoStreamEncoder::SetFecControllerOverride( FecControllerOverride* fec_controller_override) { - encoder_queue_.PostTask([this, fec_controller_override] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, fec_controller_override] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_DCHECK(!fec_controller_override_); fec_controller_override_ = fec_controller_override; if (encoder_) { @@ -798,10 +806,10 @@ void VideoStreamEncoder::AddAdaptationResource( // of this MapResourceToReason() call. 
TRACE_EVENT_ASYNC_BEGIN0( "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this); - encoder_queue_.PostTask([this, resource = std::move(resource)] { + encoder_queue_->PostTask([this, resource = std::move(resource)] { TRACE_EVENT_ASYNC_END0( "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this); - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); additional_resources_.push_back(resource); stream_resource_manager_.AddResource(resource, VideoAdaptationReason::kCpu); }); @@ -816,8 +824,8 @@ VideoStreamEncoder::GetAdaptationResources() { // here. rtc::Event event; std::vector<rtc::scoped_refptr<Resource>> resources; - encoder_queue_.PostTask([&] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([&] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); resources = resource_adaptation_processor_->GetResources(); event.Set(); }); @@ -833,8 +841,8 @@ void VideoStreamEncoder::SetSource( input_state_provider_.OnHasInputChanged(source); // This may trigger reconfiguring the QualityScaler on the encoder queue. 
- encoder_queue_.PostTask([this, degradation_preference] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, degradation_preference] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); degradation_preference_manager_->SetDegradationPreference( degradation_preference); stream_resource_manager_.SetDegradationPreferences(degradation_preference); @@ -852,15 +860,15 @@ void VideoStreamEncoder::SetSink(EncoderSink* sink, bool rotation_applied) { video_source_sink_controller_.SetRotationApplied(rotation_applied); video_source_sink_controller_.PushSourceSinkSettings(); - encoder_queue_.PostTask([this, sink] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, sink] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); sink_ = sink; }); } void VideoStreamEncoder::SetStartBitrate(int start_bitrate_bps) { - encoder_queue_.PostTask([this, start_bitrate_bps] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, start_bitrate_bps] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_LOG(LS_INFO) << "SetStartBitrate " << start_bitrate_bps; encoder_target_bitrate_bps_ = start_bitrate_bps != 0 ? 
absl::optional<uint32_t>(start_bitrate_bps) @@ -879,10 +887,10 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length, SetParametersCallback callback) { RTC_DCHECK_RUN_ON(worker_queue_); - encoder_queue_.PostTask([this, config = std::move(config), - max_data_payload_length, - callback = std::move(callback)]() mutable { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, config = std::move(config), + max_data_payload_length, + callback = std::move(callback)]() mutable { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_DCHECK(sink_); RTC_LOG(LS_INFO) << "ConfigureEncoder requested."; @@ -1484,7 +1492,7 @@ void VideoStreamEncoder::OnEncoderSettingsChanged() { void VideoStreamEncoder::OnFrame(Timestamp post_time, bool queue_overload, const VideoFrame& video_frame) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); VideoFrame incoming_frame = video_frame; // In some cases, e.g., when the frame from decoder is fed to encoder, @@ -1579,7 +1587,7 @@ void VideoStreamEncoder::OnDiscardedFrame() { } bool VideoStreamEncoder::EncoderPaused() const { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); // Pause video if paused by caller or as long as the network is down or the // pacer queue has grown too large in buffered mode. // If the pacer queue has grown too large or the network is down, @@ -1589,7 +1597,7 @@ bool VideoStreamEncoder::EncoderPaused() const { } void VideoStreamEncoder::TraceFrameDropStart() { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); // Start trace event only on the first frame after encoder is paused. 
if (!encoder_paused_and_dropped_frame_) { TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this); @@ -1598,7 +1606,7 @@ void VideoStreamEncoder::TraceFrameDropStart() { } void VideoStreamEncoder::TraceFrameDropEnd() { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); // End trace event on first frame after encoder resumes, if frame was dropped. if (encoder_paused_and_dropped_frame_) { TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this); @@ -1731,7 +1739,7 @@ void VideoStreamEncoder::SetEncoderRates( void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, int64_t time_when_posted_us) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); input_state_provider_.OnFrameSizeObserved(video_frame.size()); if (!last_frame_info_ || video_frame.width() != last_frame_info_->width || @@ -1863,7 +1871,7 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, int64_t time_when_posted_us) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_LOG(LS_VERBOSE) << __func__ << " posted " << time_when_posted_us << " ntp time " << video_frame.ntp_time_ms(); @@ -2030,11 +2038,11 @@ void VideoStreamEncoder::RequestRefreshFrame() { void VideoStreamEncoder::SendKeyFrame( const std::vector<VideoFrameType>& layers) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this, layers] { SendKeyFrame(layers); }); + if (!encoder_queue_->IsCurrent()) { + encoder_queue_->PostTask([this, layers] { SendKeyFrame(layers); }); return; } - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); TRACE_EVENT0("webrtc", "OnKeyFrameRequest"); RTC_DCHECK(!next_frame_types_.empty()); @@ -2059,13 +2067,13 @@ void VideoStreamEncoder::SendKeyFrame( void VideoStreamEncoder::OnLossNotification( const VideoEncoder::LossNotification& loss_notification) { - if 
(!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask( + if (!encoder_queue_->IsCurrent()) { + encoder_queue_->PostTask( [this, loss_notification] { OnLossNotification(loss_notification); }); return; } - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); if (encoder_) { encoder_->OnLossNotification(loss_notification); } @@ -2120,10 +2128,11 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( // need to update on quality convergence. unsigned int image_width = image_copy._encodedWidth; unsigned int image_height = image_copy._encodedHeight; - encoder_queue_.PostTask([this, codec_type, image_width, image_height, - simulcast_index, - at_target_quality = image_copy.IsAtTargetQuality()] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, codec_type, image_width, image_height, + simulcast_index, + at_target_quality = + image_copy.IsAtTargetQuality()] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); // Let the frame cadence adapter know about quality convergence. 
if (frame_cadence_adapter_) @@ -2201,15 +2210,15 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( void VideoStreamEncoder::OnDroppedFrame(DropReason reason) { sink_->OnDroppedFrame(reason); - encoder_queue_.PostTask([this, reason] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, reason] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); stream_resource_manager_.OnFrameDropped(reason); }); } DataRate VideoStreamEncoder::UpdateTargetBitrate(DataRate target_bitrate, double cwnd_reduce_ratio) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); DataRate updated_target_bitrate = target_bitrate; // Drop frames when congestion window pushback ratio is larger than 1 @@ -2241,10 +2250,10 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, int64_t round_trip_time_ms, double cwnd_reduce_ratio) { RTC_DCHECK_GE(link_allocation, target_bitrate); - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this, target_bitrate, stable_target_bitrate, - link_allocation, fraction_lost, round_trip_time_ms, - cwnd_reduce_ratio] { + if (!encoder_queue_->IsCurrent()) { + encoder_queue_->PostTask([this, target_bitrate, stable_target_bitrate, + link_allocation, fraction_lost, + round_trip_time_ms, cwnd_reduce_ratio] { DataRate updated_target_bitrate = UpdateTargetBitrate(target_bitrate, cwnd_reduce_ratio); OnBitrateUpdated(updated_target_bitrate, stable_target_bitrate, @@ -2253,7 +2262,7 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, }); return; } - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); const bool video_is_suspended = target_bitrate == DataRate::Zero(); const bool video_suspension_changed = video_is_suspended != EncoderPaused(); @@ -2353,7 +2362,7 @@ void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated( const VideoAdaptationCounters& adaptation_counters, rtc::scoped_refptr<Resource> reason, const VideoSourceRestrictions& 
unfiltered_restrictions) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_LOG(LS_INFO) << "Updating sink restrictions from " << (reason ? reason->Name() : std::string("<null>")) << " to " << restrictions.ToString(); @@ -2379,15 +2388,15 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, int64_t time_sent_us, int temporal_index, DataSize frame_size) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this, encoded_image, time_sent_us, temporal_index, - frame_size] { + if (!encoder_queue_->IsCurrent()) { + encoder_queue_->PostTask([this, encoded_image, time_sent_us, temporal_index, + frame_size] { RunPostEncode(encoded_image, time_sent_us, temporal_index, frame_size); }); return; } - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); absl::optional<int> encode_duration_us; if (encoded_image.timing_.flags != VideoSendTiming::kInvalid) { @@ -2539,8 +2548,8 @@ void VideoStreamEncoder::CheckForAnimatedContent( void VideoStreamEncoder::InjectAdaptationResource( rtc::scoped_refptr<Resource> resource, VideoAdaptationReason reason) { - encoder_queue_.PostTask([this, resource = std::move(resource), reason] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, resource = std::move(resource), reason] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); additional_resources_.push_back(resource); stream_resource_manager_.AddResource(resource, reason); }); @@ -2549,8 +2558,8 @@ void VideoStreamEncoder::InjectAdaptationResource( void VideoStreamEncoder::InjectAdaptationConstraint( AdaptationConstraint* adaptation_constraint) { rtc::Event event; - encoder_queue_.PostTask([this, adaptation_constraint, &event] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, adaptation_constraint, &event] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); if (!resource_adaptation_processor_) { // The VideoStreamEncoder was stopped and the processor destroyed before 
// this task had a chance to execute. No action needed. @@ -2566,8 +2575,8 @@ void VideoStreamEncoder::InjectAdaptationConstraint( void VideoStreamEncoder::AddRestrictionsListenerForTesting( VideoSourceRestrictionsListener* restrictions_listener) { rtc::Event event; - encoder_queue_.PostTask([this, restrictions_listener, &event] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, restrictions_listener, &event] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_DCHECK(resource_adaptation_processor_); video_stream_adapter_->AddRestrictionsListener(restrictions_listener); event.Set(); @@ -2578,8 +2587,8 @@ void VideoStreamEncoder::AddRestrictionsListenerForTesting( void VideoStreamEncoder::RemoveRestrictionsListenerForTesting( VideoSourceRestrictionsListener* restrictions_listener) { rtc::Event event; - encoder_queue_.PostTask([this, restrictions_listener, &event] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, restrictions_listener, &event] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_DCHECK(resource_adaptation_processor_); video_stream_adapter_->RemoveRestrictionsListener(restrictions_listener); event.Set(); diff --git a/third_party/libwebrtc/video/video_stream_encoder.h b/third_party/libwebrtc/video/video_stream_encoder.h index f2c21c12b0..2a542ffe40 100644 --- a/third_party/libwebrtc/video/video_stream_encoder.h +++ b/third_party/libwebrtc/video/video_stream_encoder.h @@ -42,7 +42,6 @@ #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" @@ -136,7 +135,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Used for testing. For example the `ScalingObserverInterface` methods must // be called on `encoder_queue_`. 
- TaskQueueBase* encoder_queue() { return encoder_queue_.Get(); } + TaskQueueBase* encoder_queue() { return encoder_queue_.get(); } void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, @@ -210,8 +209,8 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, class DegradationPreferenceManager; - void ReconfigureEncoder() RTC_RUN_ON(&encoder_queue_); - void OnEncoderSettingsChanged() RTC_RUN_ON(&encoder_queue_); + void ReconfigureEncoder() RTC_RUN_ON(encoder_queue_); + void OnEncoderSettingsChanged() RTC_RUN_ON(encoder_queue_); void OnFrame(Timestamp post_time, bool queue_overload, const VideoFrame& video_frame); @@ -225,7 +224,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int64_t time_when_posted_in_ms); // Indicates whether frame should be dropped because the pixel count is too // large for the current bitrate configuration. - bool DropDueToSize(uint32_t pixel_count) const RTC_RUN_ON(&encoder_queue_); + bool DropDueToSize(uint32_t pixel_count) const RTC_RUN_ON(encoder_queue_); // Implements EncodedImageCallback. EncodedImageCallback::Result OnEncodedImage( @@ -241,25 +240,25 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Returns a copy of `rate_settings` with the `bitrate` field updated using // the current VideoBitrateAllocator. 
EncoderRateSettings UpdateBitrateAllocation( - const EncoderRateSettings& rate_settings) RTC_RUN_ON(&encoder_queue_); + const EncoderRateSettings& rate_settings) RTC_RUN_ON(encoder_queue_); - uint32_t GetInputFramerateFps() RTC_RUN_ON(&encoder_queue_); + uint32_t GetInputFramerateFps() RTC_RUN_ON(encoder_queue_); void SetEncoderRates(const EncoderRateSettings& rate_settings) - RTC_RUN_ON(&encoder_queue_); + RTC_RUN_ON(encoder_queue_); void RunPostEncode(const EncodedImage& encoded_image, int64_t time_sent_us, int temporal_index, DataSize frame_size); - void ReleaseEncoder() RTC_RUN_ON(&encoder_queue_); + void ReleaseEncoder() RTC_RUN_ON(encoder_queue_); // After calling this function `resource_adaptation_processor_` will be null. void ShutdownResourceAdaptationQueue(); void CheckForAnimatedContent(const VideoFrame& frame, int64_t time_when_posted_in_ms) - RTC_RUN_ON(&encoder_queue_); + RTC_RUN_ON(encoder_queue_); - void RequestEncoderSwitch() RTC_RUN_ON(&encoder_queue_); + void RequestEncoderSwitch() RTC_RUN_ON(encoder_queue_); // Augments an EncodedImage received from an encoder with parsable // information. @@ -269,7 +268,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, void ProcessDroppedFrame(const VideoFrame& frame, VideoStreamEncoderObserver::DropReason reason) - RTC_RUN_ON(&encoder_queue_); + RTC_RUN_ON(encoder_queue_); const FieldTrialsView& field_trials_; TaskQueueBase* const worker_queue_; @@ -296,67 +295,66 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Frame cadence encoder adapter. Frames enter this adapter first, and it then // forwards them to our OnFrame method. 
std::unique_ptr<FrameCadenceAdapterInterface> frame_cadence_adapter_ - RTC_GUARDED_BY(&encoder_queue_) RTC_PT_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_) RTC_PT_GUARDED_BY(encoder_queue_); - VideoEncoderConfig encoder_config_ RTC_GUARDED_BY(&encoder_queue_); - std::unique_ptr<VideoEncoder> encoder_ RTC_GUARDED_BY(&encoder_queue_) - RTC_PT_GUARDED_BY(&encoder_queue_); + VideoEncoderConfig encoder_config_ RTC_GUARDED_BY(encoder_queue_); + std::unique_ptr<VideoEncoder> encoder_ RTC_GUARDED_BY(encoder_queue_) + RTC_PT_GUARDED_BY(encoder_queue_); bool encoder_initialized_ = false; std::unique_ptr<VideoBitrateAllocator> rate_allocator_ - RTC_GUARDED_BY(&encoder_queue_) RTC_PT_GUARDED_BY(&encoder_queue_); - int max_framerate_ RTC_GUARDED_BY(&encoder_queue_) = -1; + RTC_GUARDED_BY(encoder_queue_) RTC_PT_GUARDED_BY(encoder_queue_); + int max_framerate_ RTC_GUARDED_BY(encoder_queue_) = -1; // Set when ConfigureEncoder has been called in order to lazy reconfigure the // encoder on the next frame. - bool pending_encoder_reconfiguration_ RTC_GUARDED_BY(&encoder_queue_) = false; + bool pending_encoder_reconfiguration_ RTC_GUARDED_BY(encoder_queue_) = false; // Set when configuration must create a new encoder object, e.g., // because of a codec change. 
- bool pending_encoder_creation_ RTC_GUARDED_BY(&encoder_queue_) = false; + bool pending_encoder_creation_ RTC_GUARDED_BY(encoder_queue_) = false; absl::InlinedVector<SetParametersCallback, 2> encoder_configuration_callbacks_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); absl::optional<VideoFrameInfo> last_frame_info_ - RTC_GUARDED_BY(&encoder_queue_); - int crop_width_ RTC_GUARDED_BY(&encoder_queue_) = 0; - int crop_height_ RTC_GUARDED_BY(&encoder_queue_) = 0; + RTC_GUARDED_BY(encoder_queue_); + int crop_width_ RTC_GUARDED_BY(encoder_queue_) = 0; + int crop_height_ RTC_GUARDED_BY(encoder_queue_) = 0; absl::optional<uint32_t> encoder_target_bitrate_bps_ - RTC_GUARDED_BY(&encoder_queue_); - size_t max_data_payload_length_ RTC_GUARDED_BY(&encoder_queue_) = 0; + RTC_GUARDED_BY(encoder_queue_); + size_t max_data_payload_length_ RTC_GUARDED_BY(encoder_queue_) = 0; absl::optional<EncoderRateSettings> last_encoder_rate_settings_ - RTC_GUARDED_BY(&encoder_queue_); - bool encoder_paused_and_dropped_frame_ RTC_GUARDED_BY(&encoder_queue_) = - false; + RTC_GUARDED_BY(encoder_queue_); + bool encoder_paused_and_dropped_frame_ RTC_GUARDED_BY(encoder_queue_) = false; // Set to true if at least one frame was sent to encoder since last encoder // initialization. bool was_encode_called_since_last_initialization_ - RTC_GUARDED_BY(&encoder_queue_) = false; + RTC_GUARDED_BY(encoder_queue_) = false; - bool encoder_failed_ RTC_GUARDED_BY(&encoder_queue_) = false; + bool encoder_failed_ RTC_GUARDED_BY(encoder_queue_) = false; Clock* const clock_; // Used to make sure incoming time stamp is increasing for every frame. - int64_t last_captured_timestamp_ RTC_GUARDED_BY(&encoder_queue_) = 0; + int64_t last_captured_timestamp_ RTC_GUARDED_BY(encoder_queue_) = 0; // Delta used for translating between NTP and internal timestamps. 
- const int64_t delta_ntp_internal_ms_ RTC_GUARDED_BY(&encoder_queue_); + const int64_t delta_ntp_internal_ms_ RTC_GUARDED_BY(encoder_queue_); - int64_t last_frame_log_ms_ RTC_GUARDED_BY(&encoder_queue_); - int captured_frame_count_ RTC_GUARDED_BY(&encoder_queue_) = 0; - int dropped_frame_cwnd_pushback_count_ RTC_GUARDED_BY(&encoder_queue_) = 0; - int dropped_frame_encoder_block_count_ RTC_GUARDED_BY(&encoder_queue_) = 0; - absl::optional<VideoFrame> pending_frame_ RTC_GUARDED_BY(&encoder_queue_); - int64_t pending_frame_post_time_us_ RTC_GUARDED_BY(&encoder_queue_) = 0; + int64_t last_frame_log_ms_ RTC_GUARDED_BY(encoder_queue_); + int captured_frame_count_ RTC_GUARDED_BY(encoder_queue_) = 0; + int dropped_frame_cwnd_pushback_count_ RTC_GUARDED_BY(encoder_queue_) = 0; + int dropped_frame_encoder_block_count_ RTC_GUARDED_BY(encoder_queue_) = 0; + absl::optional<VideoFrame> pending_frame_ RTC_GUARDED_BY(encoder_queue_); + int64_t pending_frame_post_time_us_ RTC_GUARDED_BY(encoder_queue_) = 0; VideoFrame::UpdateRect accumulated_update_rect_ - RTC_GUARDED_BY(&encoder_queue_); - bool accumulated_update_rect_is_valid_ RTC_GUARDED_BY(&encoder_queue_) = true; + RTC_GUARDED_BY(encoder_queue_); + bool accumulated_update_rect_is_valid_ RTC_GUARDED_BY(encoder_queue_) = true; // Used for automatic content type detection. absl::optional<VideoFrame::UpdateRect> last_update_rect_ - RTC_GUARDED_BY(&encoder_queue_); - Timestamp animation_start_time_ RTC_GUARDED_BY(&encoder_queue_) = + RTC_GUARDED_BY(encoder_queue_); + Timestamp animation_start_time_ RTC_GUARDED_BY(encoder_queue_) = Timestamp::PlusInfinity(); - bool cap_resolution_due_to_video_content_ RTC_GUARDED_BY(&encoder_queue_) = + bool cap_resolution_due_to_video_content_ RTC_GUARDED_BY(encoder_queue_) = false; // Used to correctly ignore changes in update_rect introduced by // resize triggered by animation detection. 
@@ -364,24 +362,24 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, kNoResize, // Normal operation. kResize, // Resize was triggered by the animation detection. kFirstFrameAfterResize // Resize observed. - } expect_resize_state_ RTC_GUARDED_BY(&encoder_queue_) = + } expect_resize_state_ RTC_GUARDED_BY(encoder_queue_) = ExpectResizeState::kNoResize; FecControllerOverride* fec_controller_override_ - RTC_GUARDED_BY(&encoder_queue_) = nullptr; + RTC_GUARDED_BY(encoder_queue_) = nullptr; absl::optional<int64_t> last_parameters_update_ms_ - RTC_GUARDED_BY(&encoder_queue_); - absl::optional<int64_t> last_encode_info_ms_ RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); + absl::optional<int64_t> last_encode_info_ms_ RTC_GUARDED_BY(encoder_queue_); - VideoEncoder::EncoderInfo encoder_info_ RTC_GUARDED_BY(&encoder_queue_); - VideoCodec send_codec_ RTC_GUARDED_BY(&encoder_queue_); + VideoEncoder::EncoderInfo encoder_info_ RTC_GUARDED_BY(encoder_queue_); + VideoCodec send_codec_ RTC_GUARDED_BY(encoder_queue_); - FrameDropper frame_dropper_ RTC_GUARDED_BY(&encoder_queue_); + FrameDropper frame_dropper_ RTC_GUARDED_BY(encoder_queue_); // If frame dropper is not force disabled, frame dropping might still be // disabled if VideoEncoder::GetEncoderInfo() indicates that the encoder has a // trusted rate controller. This is determined on a per-frame basis, as the // encoder behavior might dynamically change. - bool force_disable_frame_dropper_ RTC_GUARDED_BY(&encoder_queue_) = false; + bool force_disable_frame_dropper_ RTC_GUARDED_BY(encoder_queue_) = false; // Incremented on worker thread whenever `frame_dropper_` determines that a // frame should be dropped. Decremented on whichever thread runs // OnEncodedImage(), which is only called by one thread but not necessarily @@ -390,16 +388,16 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Congestion window frame drop ratio (drop 1 in every // cwnd_frame_drop_interval_ frames). 
- absl::optional<int> cwnd_frame_drop_interval_ RTC_GUARDED_BY(&encoder_queue_); + absl::optional<int> cwnd_frame_drop_interval_ RTC_GUARDED_BY(encoder_queue_); // Frame counter for congestion window frame drop. - int cwnd_frame_counter_ RTC_GUARDED_BY(&encoder_queue_) = 0; + int cwnd_frame_counter_ RTC_GUARDED_BY(encoder_queue_) = 0; std::unique_ptr<EncoderBitrateAdjuster> bitrate_adjuster_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); // TODO(sprang): Change actually support keyframe per simulcast stream, or // turn this into a simple bool `pending_keyframe_request_`. - std::vector<VideoFrameType> next_frame_types_ RTC_GUARDED_BY(&encoder_queue_); + std::vector<VideoFrameType> next_frame_types_ RTC_GUARDED_BY(encoder_queue_); FrameEncodeMetadataWriter frame_encode_metadata_writer_{this}; @@ -421,22 +419,22 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, ParseAutomatincAnimationDetectionFieldTrial() const; AutomaticAnimationDetectionExperiment - automatic_animation_detection_experiment_ RTC_GUARDED_BY(&encoder_queue_); + automatic_animation_detection_experiment_ RTC_GUARDED_BY(encoder_queue_); // Provides video stream input states: current resolution and frame rate. VideoStreamInputStateProvider input_state_provider_; const std::unique_ptr<VideoStreamAdapter> video_stream_adapter_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); // Responsible for adapting input resolution or frame rate to ensure resources // (e.g. CPU or bandwidth) are not overused. Adding resources can occur on any // thread. 
std::unique_ptr<ResourceAdaptationProcessorInterface> - resource_adaptation_processor_ RTC_GUARDED_BY(&encoder_queue_); + resource_adaptation_processor_ RTC_GUARDED_BY(encoder_queue_); std::unique_ptr<DegradationPreferenceManager> degradation_preference_manager_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); std::vector<AdaptationConstraint*> adaptation_constraints_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); // Handles input, output and stats reporting related to VideoStreamEncoder // specific resources, such as "encode usage percent" measurements and "QP // scaling". Also involved with various mitigations such as initial frame @@ -445,9 +443,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // tied to the VideoStreamEncoder (which is destroyed off the encoder queue) // and its resource list is accessible from any thread. VideoStreamEncoderResourceManager stream_resource_manager_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); std::vector<rtc::scoped_refptr<Resource>> additional_resources_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); // Carries out the VideoSourceRestrictions provided by the // ResourceAdaptationProcessor, i.e. reconfigures the source of video frames // to provide us with different resolution or frame rate. @@ -479,9 +477,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // so that ownership on restrictions/wants is kept on &encoder_queue_, that // these extra copies would not be needed. absl::optional<VideoSourceRestrictions> latest_restrictions_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); absl::optional<VideoSourceRestrictions> animate_restrictions_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); // Used to cancel any potentially pending tasks to the worker thread. 
// Refrenced by tasks running on `encoder_queue_` so need to be destroyed @@ -489,9 +487,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // `worker_queue_`. ScopedTaskSafety task_safety_; - // Public methods are proxied to the task queues. The queues must be destroyed - // first to make sure no tasks run that use other members. - rtc::TaskQueue encoder_queue_; + std::unique_ptr<TaskQueueBase, TaskQueueDeleter> encoder_queue_; }; } // namespace webrtc diff --git a/third_party/libwebrtc/video/video_stream_encoder_unittest.cc b/third_party/libwebrtc/video/video_stream_encoder_unittest.cc index 6fa99081cd..d752e1b23b 100644 --- a/third_party/libwebrtc/video/video_stream_encoder_unittest.cc +++ b/third_party/libwebrtc/video/video_stream_encoder_unittest.cc @@ -875,8 +875,9 @@ class VideoStreamEncoderTest : public ::testing::Test { "EncoderQueue", TaskQueueFactory::Priority::NORMAL); TaskQueueBase* encoder_queue_ptr = encoder_queue.get(); std::unique_ptr<FrameCadenceAdapterInterface> cadence_adapter = - FrameCadenceAdapterInterface::Create(time_controller_.GetClock(), - encoder_queue_ptr, field_trials_); + FrameCadenceAdapterInterface::Create( + time_controller_.GetClock(), encoder_queue_ptr, + /*metronome=*/nullptr, /*worker_queue=*/nullptr, field_trials_); video_stream_encoder_ = std::make_unique<VideoStreamEncoderUnderTest>( &time_controller_, std::move(cadence_adapter), std::move(encoder_queue), stats_proxy_.get(), video_send_config_.encoder_settings, @@ -9556,7 +9557,7 @@ TEST(VideoStreamEncoderFrameCadenceTest, "WebRTC-ZeroHertzScreenshare/Enabled/"); auto adapter = FrameCadenceAdapterInterface::Create( factory.GetTimeController()->GetClock(), encoder_queue.get(), - field_trials); + /*metronome=*/nullptr, /*worker_queue=*/nullptr, field_trials); FrameCadenceAdapterInterface* adapter_ptr = adapter.get(); MockVideoSourceInterface mock_source; |