// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/host/video_scheduler.h"

#include <algorithm>

#include "base/bind.h"
#include "base/callback.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/stl_util.h"
#include "base/sys_info.h"
#include "base/time/time.h"
#include "remoting/proto/control.pb.h"
#include "remoting/proto/internal.pb.h"
#include "remoting/proto/video.pb.h"
#include "remoting/protocol/cursor_shape_stub.h"
#include "remoting/protocol/message_decoder.h"
#include "remoting/protocol/video_stub.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/mouse_cursor.h"

namespace remoting {

// Maximum number of frames that can be processed simultaneously.
// TODO(hclam): Move this value to CaptureScheduler.
static const int kMaxPendingFrames = 2;

// Interval between empty keep-alive frames. These frames are sent only when
// the stream is paused or inactive for some other reason (e.g. when blocked
// on the capturer). To prevent PseudoTCP from resetting the congestion
// window, this value must be smaller than the minimum RTO used in PseudoTCP,
// which is 250ms.
static const int kKeepAlivePacketIntervalMs = 200;

static bool g_enable_timestamps = false;

// static
void VideoScheduler::EnableTimestampsForTests() {
  g_enable_timestamps = true;
}

VideoScheduler::VideoScheduler(
    scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner,
    scoped_refptr<base::SingleThreadTaskRunner> encode_task_runner,
    scoped_refptr<base::SingleThreadTaskRunner> network_task_runner,
    scoped_ptr<webrtc::DesktopCapturer> capturer,
    scoped_ptr<webrtc::MouseCursorMonitor> mouse_cursor_monitor,
    scoped_ptr<VideoEncoder> encoder,
    protocol::CursorShapeStub* cursor_stub,
    protocol::VideoStub* video_stub)
    : capture_task_runner_(capture_task_runner),
      encode_task_runner_(encode_task_runner),
      network_task_runner_(network_task_runner),
      capturer_(capturer.Pass()),
      mouse_cursor_monitor_(mouse_cursor_monitor.Pass()),
      encoder_(encoder.Pass()),
      cursor_stub_(cursor_stub),
      video_stub_(video_stub),
      pending_frames_(0),
      capture_pending_(false),
      did_skip_frame_(false),
      is_paused_(false),
      latest_event_timestamp_(0) {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  DCHECK(capturer_);
  DCHECK(mouse_cursor_monitor_);
  DCHECK(encoder_);
  DCHECK(cursor_stub_);
  DCHECK(video_stub_);
}
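
// NOTE: The scheduler hops between three task runners: capturing and mouse
// cursor monitoring happen on |capture_task_runner_|, encoding on
// |encode_task_runner_|, and all interaction with the stubs and the public
// entry points on |network_task_runner_|. The DCHECKs in each method below
// record which thread that method must run on.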

// Public methods --------------------------------------------------------------

webrtc::SharedMemory* VideoScheduler::CreateSharedMemory(size_t size) {
  return nullptr;
}

void VideoScheduler::OnCaptureCompleted(webrtc::DesktopFrame* frame) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  capture_pending_ = false;

  scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);

  if (owned_frame) {
    scheduler_.RecordCaptureTime(
        base::TimeDelta::FromMilliseconds(owned_frame->capture_time_ms()));
  }

  // Even when |frame| is nullptr we still need to post it to the encode thread
  // to make sure frames are freed in the same order they are received and
  // that we don't start capturing frame n+2 before frame n is freed.
  encode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::EncodeFrame, this,
                            base::Passed(&owned_frame), latest_event_timestamp_,
                            base::TimeTicks::Now()));

  // If a frame was skipped, try to capture it again.
  if (did_skip_frame_) {
    capture_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::CaptureNextFrame, this));
  }
}

void VideoScheduler::OnMouseCursor(webrtc::MouseCursor* cursor) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  scoped_ptr<webrtc::MouseCursor> owned_cursor(cursor);

  // Do nothing if the scheduler is being stopped.
  if (!capturer_)
    return;

  scoped_ptr<protocol::CursorShapeInfo> cursor_proto(
      new protocol::CursorShapeInfo());
  cursor_proto->set_width(cursor->image()->size().width());
  cursor_proto->set_height(cursor->image()->size().height());
  cursor_proto->set_hotspot_x(cursor->hotspot().x());
  cursor_proto->set_hotspot_y(cursor->hotspot().y());

  cursor_proto->set_data(std::string());
  uint8_t* current_row = cursor->image()->data();
  for (int y = 0; y < cursor->image()->size().height(); ++y) {
    cursor_proto->mutable_data()->append(
        current_row,
        current_row + cursor->image()->size().width() *
            webrtc::DesktopFrame::kBytesPerPixel);
    current_row += cursor->image()->stride();
  }

  network_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::SendCursorShape, this,
                            base::Passed(&cursor_proto)));
}
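
// NOTE: OnMouseCursor() above copies the cursor image row by row because
// |stride()| can be larger than width * kBytesPerPixel when rows are padded;
// appending the whole buffer in one call would also pick up the padding bytes.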

void VideoScheduler::OnMouseCursorPosition(
    webrtc::MouseCursorMonitor::CursorState state,
    const webrtc::DesktopVector& position) {
  // We're not subscribing to mouse position changes.
  NOTREACHED();
}

void VideoScheduler::Start() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  capture_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::StartOnCaptureThread, this));
}

void VideoScheduler::Stop() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  // Clear stubs to prevent further updates reaching the client.
  cursor_stub_ = nullptr;
  video_stub_ = nullptr;

  keep_alive_timer_.reset();

  capture_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::StopOnCaptureThread, this));
}

void VideoScheduler::Pause(bool pause) {
  if (!capture_task_runner_->BelongsToCurrentThread()) {
    DCHECK(network_task_runner_->BelongsToCurrentThread());
    capture_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::Pause, this, pause));
    return;
  }

  if (is_paused_ != pause) {
    is_paused_ = pause;

    // Restart captures if we're resuming and there are none scheduled.
    if (!is_paused_ && capture_timer_ && !capture_timer_->IsRunning())
      CaptureNextFrame();
  }
}
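
// NOTE: Pause(), SetLatestEventTimestamp(), SetLosslessEncode() and
// SetLosslessColor() all follow the same pattern: when invoked on the network
// thread they re-post themselves to the thread that owns the state they
// modify, so that state is only ever touched from a single thread.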

void VideoScheduler::SetLatestEventTimestamp(int64 latest_event_timestamp) {
  if (!capture_task_runner_->BelongsToCurrentThread()) {
    DCHECK(network_task_runner_->BelongsToCurrentThread());
    capture_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::SetLatestEventTimestamp,
                              this, latest_event_timestamp));
    return;
  }

  latest_event_timestamp_ = latest_event_timestamp;
}

void VideoScheduler::SetLosslessEncode(bool want_lossless) {
  if (!encode_task_runner_->BelongsToCurrentThread()) {
    DCHECK(network_task_runner_->BelongsToCurrentThread());
    encode_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::SetLosslessEncode,
                              this, want_lossless));
    return;
  }

  encoder_->SetLosslessEncode(want_lossless);
}

void VideoScheduler::SetLosslessColor(bool want_lossless) {
  if (!encode_task_runner_->BelongsToCurrentThread()) {
    DCHECK(network_task_runner_->BelongsToCurrentThread());
    encode_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::SetLosslessColor,
                              this, want_lossless));
    return;
  }

  encoder_->SetLosslessColor(want_lossless);
}

// Private methods -----------------------------------------------------------

VideoScheduler::~VideoScheduler() {
  // Destroy the capturer and encoder on their respective threads.
  capture_task_runner_->DeleteSoon(FROM_HERE, capturer_.release());
  capture_task_runner_->DeleteSoon(FROM_HERE, mouse_cursor_monitor_.release());
  encode_task_runner_->DeleteSoon(FROM_HERE, encoder_.release());
}
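
// NOTE: DeleteSoon() is used because the capturer, cursor monitor and encoder
// must be destroyed on the threads that used them, while the destructor of
// this ref-counted object can presumably run on whichever thread drops the
// last reference.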

// Capturer thread -------------------------------------------------------------

void VideoScheduler::StartOnCaptureThread() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());
  DCHECK(!capture_timer_);

  // Start mouse cursor monitor.
  mouse_cursor_monitor_->Init(this, webrtc::MouseCursorMonitor::SHAPE_ONLY);

  // Start the capturer.
  capturer_->Start(this);

  capture_timer_.reset(new base::OneShotTimer<VideoScheduler>());
  keep_alive_timer_.reset(new base::DelayTimer<VideoScheduler>(
      FROM_HERE, base::TimeDelta::FromMilliseconds(kKeepAlivePacketIntervalMs),
      this, &VideoScheduler::SendKeepAlivePacket));

  // Capture the first frame immediately.
  CaptureNextFrame();
}

void VideoScheduler::StopOnCaptureThread() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  // This doesn't delete frames that have already been captured, so the encoder
  // can keep using them.
  capturer_.reset();

  // |capture_timer_| must be destroyed on the thread on which it is used.
  capture_timer_.reset();
}

void VideoScheduler::ScheduleNextCapture() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  capture_timer_->Start(FROM_HERE,
                        scheduler_.NextCaptureDelay(),
                        this,
                        &VideoScheduler::CaptureNextFrame);
}

void VideoScheduler::CaptureNextFrame() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  // If we are stopping (|capturer_| is nullptr), or paused, then don't capture.
  if (!capturer_ || is_paused_)
    return;

  // Make sure we have at most two outstanding recordings. We can simply return
  // if we can't make a capture now; the next capture will be started by the
  // end of an encode operation.
  if (pending_frames_ >= kMaxPendingFrames || capture_pending_) {
    did_skip_frame_ = true;
    return;
  }

  did_skip_frame_ = false;

  // At this point we are committed to performing one capture.
  pending_frames_++;
  DCHECK_LE(pending_frames_, kMaxPendingFrames);

  // Before doing a capture, schedule the next one.
  ScheduleNextCapture();

  capture_pending_ = true;

  // Capture the mouse shape.
  mouse_cursor_monitor_->Capture();

  // And finally perform one capture.
  capturer_->Capture(webrtc::DesktopRegion());
}
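
// NOTE: CaptureNextFrame() enforces the pipelining limits: at most
// kMaxPendingFrames frames may be in flight at once and only one capture may
// be outstanding at any time. When a capture is skipped, |did_skip_frame_|
// causes OnCaptureCompleted() or FrameCaptureCompleted() to retry as soon as
// the pipeline drains.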

void VideoScheduler::FrameCaptureCompleted() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  // Decrement the pending capture count.
  pending_frames_--;
  DCHECK_GE(pending_frames_, 0);

  // If we've skipped a frame capture because we had too many captures pending,
  // then schedule one now.
  if (did_skip_frame_)
    CaptureNextFrame();
}

// Network thread --------------------------------------------------------------

void VideoScheduler::SendVideoPacket(scoped_ptr<VideoPacket> packet) {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (!video_stub_)
    return;

  video_stub_->ProcessVideoPacket(
      packet.Pass(), base::Bind(&VideoScheduler::OnVideoPacketSent, this));
}

void VideoScheduler::OnVideoPacketSent() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (!video_stub_)
    return;

  keep_alive_timer_->Reset();

  capture_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::FrameCaptureCompleted, this));
}
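
// NOTE: OnVideoPacketSent() closes the feedback loop: a frame counts as
// complete only once the network layer has finished sending it, which is what
// throttles the capture rate to the available bandwidth. Resetting
// |keep_alive_timer_| here means SendKeepAlivePacket() fires only after
// kKeepAlivePacketIntervalMs have elapsed without a real video packet being
// sent.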

void VideoScheduler::SendKeepAlivePacket() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (!video_stub_)
    return;

  video_stub_->ProcessVideoPacket(
      make_scoped_ptr(new VideoPacket()),
      base::Bind(&VideoScheduler::OnKeepAlivePacketSent, this));
}

void VideoScheduler::OnKeepAlivePacketSent() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (keep_alive_timer_)
    keep_alive_timer_->Reset();
}

void VideoScheduler::SendCursorShape(
    scoped_ptr<protocol::CursorShapeInfo> cursor_shape) {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (!cursor_stub_)
    return;

  cursor_stub_->SetCursorShape(*cursor_shape);
}

// Encoder thread --------------------------------------------------------------

void VideoScheduler::EncodeFrame(
    scoped_ptr<webrtc::DesktopFrame> frame,
    int64 latest_event_timestamp,
    base::TimeTicks timestamp) {
  DCHECK(encode_task_runner_->BelongsToCurrentThread());

  // If there is nothing to encode then send an empty packet.
  if (!frame || frame->updated_region().is_empty()) {
    capture_task_runner_->DeleteSoon(FROM_HERE, frame.release());
    scoped_ptr<VideoPacket> packet(new VideoPacket());
    packet->set_latest_event_timestamp(latest_event_timestamp);
    network_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(
            &VideoScheduler::SendVideoPacket, this, base::Passed(&packet)));
    return;
  }

  scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame);
  packet->set_latest_event_timestamp(latest_event_timestamp);

  if (g_enable_timestamps) {
    packet->set_timestamp(timestamp.ToInternalValue());
  }

  // Destroy the frame before sending |packet| because SendVideoPacket() may
  // trigger another frame to be captured, and the screen capturer expects the
  // old frame to be freed by then.
  frame.reset();

  scheduler_.RecordEncodeTime(
      base::TimeDelta::FromMilliseconds(packet->encode_time_ms()));
  network_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::SendVideoPacket, this,
                            base::Passed(&packet)));
}
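
// NOTE: EncodeFrame() still emits an empty packet when there is nothing to
// encode; without it OnVideoPacketSent() would never post
// FrameCaptureCompleted() back to the capture thread and |pending_frames_|
// would never be decremented, stalling the pipeline.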

}  // namespace remoting