/*
 * libjingle
 * Copyright 2010 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// Implementation file of class VideoCapturer.

#include "talk/media/base/videocapturer.h"

#include <algorithm>

#include "libyuv/scale_argb.h"
#include "talk/media/base/videoframefactory.h"
#include "talk/media/base/videoprocessor.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/systeminfo.h"

#if defined(HAVE_WEBRTC_VIDEO)
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "talk/media/webrtc/webrtcvideoframefactory.h"
#endif  // HAVE_WEBRTC_VIDEO

namespace cricket {

namespace {

// TODO(thorcarpenter): This is a BIG hack to flush the system with black
// frames. Frontends should coordinate to update the video state of a muted
// user. When all frontends do this, consider removing the black frame business.
const int kNumBlackFramesOnMute = 30;

// MessageHandler constants.
enum {
  MSG_DO_PAUSE = 0,
  MSG_DO_UNPAUSE,
  MSG_STATE_CHANGE
};

static const int64 kMaxDistance = ~(static_cast<int64>(1) << 63);
#ifdef LINUX
static const int kYU12Penalty = 16;  // Needs to be higher than MJPG index.
#endif
static const int kDefaultScreencastFps = 5;
typedef rtc::TypedMessageData<CaptureState> StateChangeParams;

// Limit stats data collections to ~20 seconds of 30fps data before dropping
// old data in case stats aren't reset for long periods of time.
static const size_t kMaxAccumulatorSize = 600;

}  // namespace

/////////////////////////////////////////////////////////////////////
// Implementation of struct CapturedFrame
/////////////////////////////////////////////////////////////////////
CapturedFrame::CapturedFrame()
    : width(0),
      height(0),
      fourcc(0),
      pixel_width(0),
      pixel_height(0),
      elapsed_time(0),
      time_stamp(0),
      data_size(0),
      rotation(0),
      data(NULL) {}

// TODO(fbarchard): Remove this function once lmimediaengine stops using it.
bool CapturedFrame::GetDataSize(uint32* size) const {
  if (!size || data_size == CapturedFrame::kUnknownDataSize) {
    return false;
  }
  *size = data_size;
  return true;
}

webrtc::VideoRotation CapturedFrame::GetRotation() const {
  ASSERT(rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270);
  return static_cast<webrtc::VideoRotation>(rotation);
}

/////////////////////////////////////////////////////////////////////
// Implementation of class VideoCapturer
/////////////////////////////////////////////////////////////////////
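// A rough usage sketch (not authoritative; assumes a concrete subclass that
// implements the pure virtual methods such as Start(), Stop(), IsRunning(),
// IsScreencast() and GetPreferredFourccs(), and that has populated its
// supported formats):
//
//   cricket::VideoCapturer* capturer = ...;  // some platform-specific subclass
//   cricket::VideoFormat desired(640, 480,
//                                cricket::VideoFormat::FpsToInterval(30),
//                                cricket::FOURCC_ANY);
//   cricket::VideoFormat best;
//   if (capturer->GetBestCaptureFormat(desired, &best)) {
//     capturer->StartCapturing(best);
//   }
//   // Raw frames arrive on SignalFrameCaptured, are processed in
//   // OnFrameCaptured() below, and are forwarded on SignalVideoFrame.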
VideoCapturer::VideoCapturer()
    : thread_(rtc::Thread::Current()),
      adapt_frame_drops_data_(kMaxAccumulatorSize),
      effect_frame_drops_data_(kMaxAccumulatorSize),
      frame_time_data_(kMaxAccumulatorSize),
      apply_rotation_(true) {
  Construct();
}

VideoCapturer::VideoCapturer(rtc::Thread* thread)
    : thread_(thread),
      adapt_frame_drops_data_(kMaxAccumulatorSize),
      effect_frame_drops_data_(kMaxAccumulatorSize),
      frame_time_data_(kMaxAccumulatorSize),
      apply_rotation_(true) {
  Construct();
}

void VideoCapturer::Construct() {
  ClearAspectRatio();
  enable_camera_list_ = false;
  square_pixel_aspect_ratio_ = false;
  capture_state_ = CS_STOPPED;
  SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
  scaled_width_ = 0;
  scaled_height_ = 0;
  screencast_max_pixels_ = 0;
  muted_ = false;
  black_frame_count_down_ = kNumBlackFramesOnMute;
  enable_video_adapter_ = true;
  adapt_frame_drops_ = 0;
  effect_frame_drops_ = 0;
  previous_frame_time_ = 0.0;
#ifdef HAVE_WEBRTC_VIDEO
  // There are lots of video capturers out there that don't call
  // set_frame_factory. We can either go change all of them, or we
  // can set this default.
  // TODO(pthatcher): Remove this hack and require the frame factory
  // to be passed in the constructor.
  set_frame_factory(new WebRtcVideoFrameFactory());
#endif
}

const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
  return &filtered_supported_formats_;
}

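// Starts capturing with the given format by delegating to the subclass's
// Start() implementation. Returns true if the capturer reports CS_STARTING or
// CS_RUNNING; a state-change notification is fired immediately only when the
// capturer is already running, since a CS_STARTING capturer is expected to
// report its final state later via SetCaptureState().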
bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
  previous_frame_time_ = frame_length_time_reporter_.TimerNow();
  CaptureState result = Start(capture_format);
  const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
  if (!success) {
    return false;
  }
  if (result == CS_RUNNING) {
    SetCaptureState(result);
  }
  return true;
}

void VideoCapturer::UpdateAspectRatio(int ratio_w, int ratio_h) {
  if (ratio_w == 0 || ratio_h == 0) {
    LOG(LS_WARNING) << "UpdateAspectRatio ignored invalid ratio: "
                    << ratio_w << "x" << ratio_h;
    return;
  }
  ratio_w_ = ratio_w;
  ratio_h_ = ratio_h;
}

void VideoCapturer::ClearAspectRatio() {
  ratio_w_ = 0;
  ratio_h_ = 0;
}

// Override this to have more control of how your device is started/stopped.
bool VideoCapturer::Pause(bool pause) {
  if (pause) {
    if (capture_state() == CS_PAUSED) {
      return true;
    }
    bool is_running = capture_state() == CS_STARTING ||
        capture_state() == CS_RUNNING;
    if (!is_running) {
      LOG(LS_ERROR) << "Cannot pause a stopped camera.";
      return false;
    }
    LOG(LS_INFO) << "Pausing a camera.";
    rtc::scoped_ptr<VideoFormat> capture_format_when_paused(
        capture_format_ ? new VideoFormat(*capture_format_) : NULL);
    Stop();
    SetCaptureState(CS_PAUSED);
    // If you override this function be sure to restore the capture format
    // after calling Stop().
    SetCaptureFormat(capture_format_when_paused.get());
  } else {  // Unpause.
    if (capture_state() != CS_PAUSED) {
      LOG(LS_WARNING) << "Cannot unpause a camera that hasn't been paused.";
      return false;
    }
    if (!capture_format_) {
      LOG(LS_ERROR) << "Missing capture_format_, cannot unpause a camera.";
      return false;
    }
    if (muted_) {
      LOG(LS_WARNING) << "Camera cannot be unpaused while muted.";
      return false;
    }
    LOG(LS_INFO) << "Unpausing a camera.";
    if (!Start(*capture_format_)) {
      LOG(LS_ERROR) << "Camera failed to start when unpausing.";
      return false;
    }
  }
  return true;
}

bool VideoCapturer::Restart(const VideoFormat& capture_format) {
  if (!IsRunning()) {
    return StartCapturing(capture_format);
  }

  if (GetCaptureFormat() != NULL && *GetCaptureFormat() == capture_format) {
    // The requested format is the same; nothing to do.
    return true;
  }

  Stop();
  return StartCapturing(capture_format);
}

bool VideoCapturer::MuteToBlackThenPause(bool muted) {
  if (muted == IsMuted()) {
    return true;
  }

  LOG(LS_INFO) << (muted ? "Muting" : "Unmuting") << " this video capturer.";
  muted_ = muted;  // Do this before calling Pause().
  if (muted) {
    // Reset black frame count down.
    black_frame_count_down_ = kNumBlackFramesOnMute;
    // Following frames will be overwritten with black, then the camera will be
    // paused.
    return true;
  }
  // Start the camera.
  thread_->Clear(this, MSG_DO_PAUSE);
  return Pause(false);
}

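// Records whether rotation should be applied to captured frames and forwards
// the setting to the current frame factory, if one has already been set.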
bool VideoCapturer::SetApplyRotation(bool enable) {
  apply_rotation_ = enable;
  if (frame_factory_) {
    frame_factory_->SetApplyRotation(apply_rotation_);
  }
  return true;
}

void VideoCapturer::SetSupportedFormats(
    const std::vector<VideoFormat>& formats) {
  supported_formats_ = formats;
  UpdateFilteredSupportedFormats();
}

bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
                                         VideoFormat* best_format) {
  // TODO(fbarchard): Directly support max_format.
  UpdateFilteredSupportedFormats();
  const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();

  if (supported_formats->empty()) {
    return false;
  }
  LOG(LS_INFO) << " Capture Requested " << format.ToString();
  int64 best_distance = kMaxDistance;
  std::vector<VideoFormat>::const_iterator best = supported_formats->end();
  std::vector<VideoFormat>::const_iterator i;
  for (i = supported_formats->begin(); i != supported_formats->end(); ++i) {
    int64 distance = GetFormatDistance(format, *i);
    // TODO(fbarchard): Reduce to LS_VERBOSE if/when camera capture is
    // relatively bug free.
    LOG(LS_INFO) << " Supported " << i->ToString() << " distance " << distance;
    if (distance < best_distance) {
      best_distance = distance;
      best = i;
    }
  }
  if (supported_formats->end() == best) {
    LOG(LS_ERROR) << " No acceptable camera format found";
    return false;
  }

  if (best_format) {
    best_format->width = best->width;
    best_format->height = best->height;
    best_format->fourcc = best->fourcc;
    best_format->interval = best->interval;
    LOG(LS_INFO) << " Best " << best_format->ToString() << " Interval "
                 << best_format->interval << " distance " << best_distance;
  }
  return true;
}

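// Registers a processor that will see, and may drop, every captured frame.
// Registration, removal and the per-frame run in ApplyProcessors() all take
// |crit_|.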
void VideoCapturer::AddVideoProcessor(VideoProcessor* video_processor) {
  rtc::CritScope cs(&crit_);
  ASSERT(std::find(video_processors_.begin(), video_processors_.end(),
                   video_processor) == video_processors_.end());
  video_processors_.push_back(video_processor);
}

bool VideoCapturer::RemoveVideoProcessor(VideoProcessor* video_processor) {
  rtc::CritScope cs(&crit_);
  VideoProcessors::iterator found = std::find(
      video_processors_.begin(), video_processors_.end(), video_processor);
  if (found == video_processors_.end()) {
    return false;
  }
  video_processors_.erase(found);
  return true;
}

void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
  max_format_.reset(new VideoFormat(max_format));
  LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
  UpdateFilteredSupportedFormats();
}

std::string VideoCapturer::ToString(const CapturedFrame* captured_frame) const {
  std::string fourcc_name = GetFourccName(captured_frame->fourcc) + " ";
  for (std::string::const_iterator i = fourcc_name.begin();
       i < fourcc_name.end(); ++i) {
    // Test character is printable; Avoid isprint() which asserts on negatives.
    if (*i < 32 || *i >= 127) {
      fourcc_name = "";
      break;
    }
  }

  std::ostringstream ss;
  ss << fourcc_name << captured_frame->width << "x" << captured_frame->height
     << "x" << VideoFormat::IntervalToFpsFloat(captured_frame->elapsed_time);
  return ss.str();
}

void VideoCapturer::set_frame_factory(VideoFrameFactory* frame_factory) {
  frame_factory_.reset(frame_factory);
  if (frame_factory) {
    frame_factory->SetApplyRotation(apply_rotation_);
  }
}

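// Copies a snapshot of the frame statistics accumulated since the last call
// and then resets the accumulators. Both this and UpdateStats() take
// |frame_stats_crit_|, since they may run on different threads.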
void VideoCapturer::GetStats(VariableInfo<int>* adapt_drops_stats,
                             VariableInfo<int>* effect_drops_stats,
                             VariableInfo<double>* frame_time_stats,
                             VideoFormat* last_captured_frame_format) {
  rtc::CritScope cs(&frame_stats_crit_);
  GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats);
  GetVariableSnapshot(effect_frame_drops_data_, effect_drops_stats);
  GetVariableSnapshot(frame_time_data_, frame_time_stats);
  *last_captured_frame_format = last_captured_frame_format_;

  adapt_frame_drops_data_.Reset();
  effect_frame_drops_data_.Reset();
  frame_time_data_.Reset();
}

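// Per-frame pipeline, run for every frame delivered via SignalFrameCaptured:
// 1. If muted, let a limited number of frames through (blacked out below) and
//    then post MSG_DO_PAUSE.
// 2. For screencasts, optionally downscale ARGB content in place.
// 3. Optionally rescale vertically so pixels have a square aspect ratio.
// 4. Compute crop and adapted sizes; the video adapter may drop the frame.
// 5. Build a VideoFrame via |frame_factory_|, run processors, black it out if
//    muted, emit it on SignalVideoFrame, and update stats.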
void VideoCapturer::OnFrameCaptured(VideoCapturer*,
                                    const CapturedFrame* captured_frame) {
  if (muted_) {
    if (black_frame_count_down_ == 0) {
      thread_->Post(this, MSG_DO_PAUSE, NULL);
    } else {
      --black_frame_count_down_;
    }
  }

  if (SignalVideoFrame.is_empty()) {
    return;
  }

  // Use a temporary buffer to scale.
  rtc::scoped_ptr<uint8[]> scale_buffer;

  if (IsScreencast()) {
    int scaled_width, scaled_height;
    if (screencast_max_pixels_ > 0) {
      ComputeScaleMaxPixels(captured_frame->width, captured_frame->height,
          screencast_max_pixels_, &scaled_width, &scaled_height);
    } else {
      int desired_screencast_fps = capture_format_.get() ?
          VideoFormat::IntervalToFps(capture_format_->interval) :
          kDefaultScreencastFps;
      ComputeScale(captured_frame->width, captured_frame->height,
                   desired_screencast_fps, &scaled_width, &scaled_height);
    }

    if (FOURCC_ARGB == captured_frame->fourcc &&
        (scaled_width != captured_frame->width ||
         scaled_height != captured_frame->height)) {
      if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
        LOG(LS_INFO) << "Scaling Screencast from "
                     << captured_frame->width << "x"
                     << captured_frame->height << " to "
                     << scaled_width << "x" << scaled_height;
        scaled_width_ = scaled_width;
        scaled_height_ = scaled_height;
      }
      CapturedFrame* modified_frame =
          const_cast<CapturedFrame*>(captured_frame);
      const int modified_frame_size = scaled_width * scaled_height * 4;
      scale_buffer.reset(new uint8[modified_frame_size]);
      // Compute new width such that width * height is less than maximum but
      // maintains original captured frame aspect ratio.
      // Round down width to multiple of 4 so odd width won't round up beyond
      // maximum, and so chroma channel is even width to simplify spatial
      // resampling.
      libyuv::ARGBScale(reinterpret_cast<const uint8*>(captured_frame->data),
                        captured_frame->width * 4, captured_frame->width,
                        captured_frame->height,
                        scale_buffer.get(),
                        scaled_width * 4, scaled_width, scaled_height,
                        libyuv::kFilterBilinear);
      modified_frame->width = scaled_width;
      modified_frame->height = scaled_height;
      modified_frame->data_size = scaled_width * 4 * scaled_height;
      modified_frame->data = scale_buffer.get();
    }
  }

  const int kYuy2Bpp = 2;
  const int kArgbBpp = 4;
  // TODO(fbarchard): Make a helper function to adjust pixels to square.
  // TODO(fbarchard): Hook up experiment to scaling.
  // TODO(fbarchard): Avoid scale and convert if muted.
  // Temporary buffer is scoped here so it will persist until i420_frame.Init()
  // makes a copy of the frame, converting to I420.
  rtc::scoped_ptr<uint8[]> temp_buffer;
  // YUY2 can be scaled vertically using an ARGB scaler. Aspect ratio is only
  // a problem on OSX. OSX always converts webcams to YUY2 or UYVY.
  bool can_scale =
      FOURCC_YUY2 == CanonicalFourCC(captured_frame->fourcc) ||
      FOURCC_UYVY == CanonicalFourCC(captured_frame->fourcc);

  // If pixels are not square, optionally use vertical scaling to make them
  // square. Square pixels simplify the rest of the pipeline, including
  // effects and rendering.
  if (can_scale && square_pixel_aspect_ratio_ &&
      captured_frame->pixel_width != captured_frame->pixel_height) {
    int scaled_width, scaled_height;
    // modified_frame points to the captured_frame but with const casted away
    // so it can be modified.
    CapturedFrame* modified_frame = const_cast<CapturedFrame*>(captured_frame);
    // Compute the frame size that makes pixels square pixel aspect ratio.
    ComputeScaleToSquarePixels(captured_frame->width, captured_frame->height,
                               captured_frame->pixel_width,
                               captured_frame->pixel_height,
                               &scaled_width, &scaled_height);

    if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
      LOG(LS_INFO) << "Scaling WebCam from "
                   << captured_frame->width << "x"
                   << captured_frame->height << " to "
                   << scaled_width << "x" << scaled_height
                   << " for PAR "
                   << captured_frame->pixel_width << "x"
                   << captured_frame->pixel_height;
      scaled_width_ = scaled_width;
      scaled_height_ = scaled_height;
    }
    const int modified_frame_size = scaled_width * scaled_height * kYuy2Bpp;
    uint8* temp_buffer_data;
    // Pixels are wide and short; Increasing height. Requires temporary buffer.
    if (scaled_height > captured_frame->height) {
      temp_buffer.reset(new uint8[modified_frame_size]);
      temp_buffer_data = temp_buffer.get();
    } else {
      // Pixels are narrow and tall; Decreasing height. Scale will be done
      // in place.
      temp_buffer_data = reinterpret_cast<uint8*>(captured_frame->data);
    }

    // Use ARGBScaler to vertically scale the YUY2 image, adjusting for 16 bpp.
    libyuv::ARGBScale(reinterpret_cast<const uint8*>(captured_frame->data),
                      captured_frame->width * kYuy2Bpp,  // Stride for YUY2.
                      captured_frame->width * kYuy2Bpp / kArgbBpp,  // Width.
                      abs(captured_frame->height),  // Height.
                      temp_buffer_data,
                      scaled_width * kYuy2Bpp,  // Stride for YUY2.
                      scaled_width * kYuy2Bpp / kArgbBpp,  // Width.
                      abs(scaled_height),  // New height.
                      libyuv::kFilterBilinear);
    modified_frame->width = scaled_width;
    modified_frame->height = scaled_height;
    modified_frame->pixel_width = 1;
    modified_frame->pixel_height = 1;
    modified_frame->data_size = modified_frame_size;
    modified_frame->data = temp_buffer_data;
  }

  // Size to crop captured frame to. This adjusts the captured frame's
  // aspect ratio to match the final view aspect ratio, considering pixel
  // aspect ratio and rotation. The final size may be scaled down by video
  // adapter to better match ratio_w_ x ratio_h_.
  // Note that abs() of frame height is passed in, because source may be
  // inverted, but output will be positive.
  int cropped_width = captured_frame->width;
  int cropped_height = captured_frame->height;

  // TODO(fbarchard): Improve logic to pad or crop.
  // MJPG can crop vertically, but not horizontally. This logic disables crop.
  // Alternatively we could pad the image with black, or implement a 2 step
  // crop.
  bool can_crop = true;
  if (captured_frame->fourcc == FOURCC_MJPG) {
    float cam_aspect = static_cast<float>(captured_frame->width) /
        static_cast<float>(captured_frame->height);
    float view_aspect = static_cast<float>(ratio_w_) /
        static_cast<float>(ratio_h_);
    can_crop = cam_aspect <= view_aspect;
  }
  if (can_crop && !IsScreencast()) {
    // TODO(ronghuawu): The capturer should always produce the native
    // resolution and the cropping should be done in downstream code.
    ComputeCrop(ratio_w_, ratio_h_, captured_frame->width,
                abs(captured_frame->height), captured_frame->pixel_width,
                captured_frame->pixel_height, captured_frame->rotation,
                &cropped_width, &cropped_height);
  }

  int adapted_width = cropped_width;
  int adapted_height = cropped_height;
  if (enable_video_adapter_ && !IsScreencast()) {
    const VideoFormat adapted_format =
        video_adapter_.AdaptFrameResolution(cropped_width, cropped_height);
    if (adapted_format.IsSize0x0()) {
      // VideoAdapter dropped the frame.
      ++adapt_frame_drops_;
      return;
    }
    adapted_width = adapted_format.width;
    adapted_height = adapted_format.height;
  }

  if (!frame_factory_) {
    LOG(LS_ERROR) << "No video frame factory.";
    return;
  }

  rtc::scoped_ptr<VideoFrame> adapted_frame(
      frame_factory_->CreateAliasedFrame(captured_frame,
                                         cropped_width, cropped_height,
                                         adapted_width, adapted_height));

  if (!adapted_frame) {
    // TODO(fbarchard): LOG more information about captured frame attributes.
    LOG(LS_ERROR) << "Couldn't convert to I420! "
                  << "From " << ToString(captured_frame) << " To "
                  << cropped_width << " x " << cropped_height;
    return;
  }

  if (!muted_ && !ApplyProcessors(adapted_frame.get())) {
    // Processor dropped the frame.
    ++effect_frame_drops_;
    return;
  }
  if (muted_) {
    // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead.
    adapted_frame->SetToBlack();
  }
  SignalVideoFrame(this, adapted_frame.get());

  UpdateStats(captured_frame);
}

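// Stores the new state and posts MSG_STATE_CHANGE so that SignalStateChange
// fires asynchronously on |thread_|; does nothing if the state is unchanged.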
void VideoCapturer::SetCaptureState(CaptureState state) {
  if (state == capture_state_) {
    // Don't trigger a state changed callback if the state hasn't changed.
    return;
  }
  StateChangeParams* state_params = new StateChangeParams(state);
  capture_state_ = state;
  thread_->Post(this, MSG_STATE_CHANGE, state_params);
}

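// Handles the messages posted above: asynchronous state-change notification
// and deferred pause/unpause requests.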
void VideoCapturer::OnMessage(rtc::Message* message) {
  switch (message->message_id) {
    case MSG_STATE_CHANGE: {
      rtc::scoped_ptr<StateChangeParams> p(
          static_cast<StateChangeParams*>(message->pdata));
      SignalStateChange(this, p->data());
      break;
    }
    case MSG_DO_PAUSE: {
      Pause(true);
      break;
    }
    case MSG_DO_UNPAUSE: {
      Pause(false);
      break;
    }
    default: {
      ASSERT(false);
    }
  }
}

// Get the distance between the supported and desired formats.
// Prioritization is done according to this algorithm:
// 1) Width closeness. If not same, we prefer wider.
// 2) Height closeness. If not same, we prefer higher.
// 3) Framerate closeness. If not same, we prefer faster.
// 4) Compression. If desired format has a specific fourcc, we need exact match;
//    otherwise, we use preference.
int64 VideoCapturer::GetFormatDistance(const VideoFormat& desired,
                                       const VideoFormat& supported) {
  int64 distance = kMaxDistance;

  // Check fourcc.
  uint32 supported_fourcc = CanonicalFourCC(supported.fourcc);
  int64 delta_fourcc = kMaxDistance;
  if (FOURCC_ANY == desired.fourcc) {
    // Any fourcc is OK for the desired. Use preference to find best fourcc.
    std::vector<uint32> preferred_fourccs;
    if (!GetPreferredFourccs(&preferred_fourccs)) {
      return distance;
    }

    for (size_t i = 0; i < preferred_fourccs.size(); ++i) {
      if (supported_fourcc == CanonicalFourCC(preferred_fourccs[i])) {
        delta_fourcc = i;
#ifdef LINUX
        // For HD avoid YU12 which is a software conversion and has 2 bugs
        // b/7326348 b/6960899. Reenable when fixed.
        if (supported.height >= 720 && (supported_fourcc == FOURCC_YU12 ||
                                        supported_fourcc == FOURCC_YV12)) {
          delta_fourcc += kYU12Penalty;
        }
#endif
        break;
      }
    }
  } else if (supported_fourcc == CanonicalFourCC(desired.fourcc)) {
    delta_fourcc = 0;  // Need exact match.
  }

  if (kMaxDistance == delta_fourcc) {
    // Failed to match fourcc.
    return distance;
  }

  // Check resolution and fps.
  int desired_width = desired.width;
  int desired_height = desired.height;
  int64 delta_w = supported.width - desired_width;
  float supported_fps = VideoFormat::IntervalToFpsFloat(supported.interval);
  float delta_fps =
      supported_fps - VideoFormat::IntervalToFpsFloat(desired.interval);
  // Compare the supported height to the height we would like it to be: the
  // desired aspect ratio applied at the supported width.
  int64 aspect_h =
      desired_width ? supported.width * desired_height / desired_width
                    : desired_height;
  int64 delta_h = supported.height - aspect_h;

  distance = 0;
  // Set high penalty if the supported format is lower than the desired format.
  // The 3x penalty means we would rather go down to 3/4 than up to double,
  // but we would rather go up to double than down to 1/2. This is conservative,
  // strongly avoiding going down in resolution, similar to
  // the old method, but not completely ruling it out in extreme situations.
  // It also ignores framerate, which is often very low at high resolutions.
  // TODO(fbarchard): Improve logic to use weighted factors.
  static const int kDownPenalty = -3;
  if (delta_w < 0) {
    delta_w = delta_w * kDownPenalty;
  }
  if (delta_h < 0) {
    delta_h = delta_h * kDownPenalty;
  }
  // Require camera fps to be at least 23/30 (~77%) of what is requested if
  // resolution matches, and at least 28/30 (~93%) of what is requested if
  // resolution differs; the slack allows for slight variations in fps,
  // e.g. 29.97 vs 30.
  if (delta_fps < 0) {
    float min_desirable_fps = delta_w ?
        VideoFormat::IntervalToFpsFloat(desired.interval) * 28.f / 30.f :
        VideoFormat::IntervalToFpsFloat(desired.interval) * 23.f / 30.f;
    delta_fps = -delta_fps;
    if (supported_fps < min_desirable_fps) {
      distance |= static_cast<int64>(1) << 62;
    } else {
      distance |= static_cast<int64>(1) << 15;
    }
  }
  int64 idelta_fps = static_cast<int>(delta_fps);

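  // Informal example of the packing below: with an exact width, fps and fourcc
  // match, a supported height 120 lines above the aspect-corrected desired
  // height yields a distance of 120 << 16, while any width mismatch lands in
  // bits 28 and up and therefore dominates the comparison.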
  // 12 bits for width and height and 8 bits for fps and fourcc.
  distance |=
      (delta_w << 28) | (delta_h << 16) | (idelta_fps << 8) | delta_fourcc;

  return distance;
}

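// Runs every registered processor on the frame under |crit_|. Returns false
// as soon as any processor asks for the frame to be dropped.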
bool VideoCapturer::ApplyProcessors(VideoFrame* video_frame) {
  bool drop_frame = false;
  rtc::CritScope cs(&crit_);
  for (VideoProcessors::iterator iter = video_processors_.begin();
       iter != video_processors_.end(); ++iter) {
    (*iter)->OnFrame(kDummyVideoSsrc, video_frame, &drop_frame);
    if (drop_frame) {
      return false;
    }
  }
  return true;
}

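// Rebuilds |filtered_supported_formats_| from |supported_formats_|, removing
// formats that ShouldFilterFormat() rejects, and falls back to the unfiltered
// list when nothing passes the filter.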
void VideoCapturer::UpdateFilteredSupportedFormats() {
  filtered_supported_formats_.clear();
  filtered_supported_formats_ = supported_formats_;
  if (!max_format_) {
    return;
  }
  std::vector<VideoFormat>::iterator iter = filtered_supported_formats_.begin();
  while (iter != filtered_supported_formats_.end()) {
    if (ShouldFilterFormat(*iter)) {
      iter = filtered_supported_formats_.erase(iter);
    } else {
      ++iter;
    }
  }
  if (filtered_supported_formats_.empty()) {
    // The device only captures at resolutions higher than |max_format_|; this
    // indicates that |max_format_| should be ignored, as it is better to
    // capture at too high a resolution than to not capture at all.
    filtered_supported_formats_ = supported_formats_;
  }
}

bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
  if (!enable_camera_list_) {
    return false;
  }
  return format.width > max_format_->width ||
         format.height > max_format_->height;
}

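// Records the format of the latest captured frame and, once a previous frame
// time exists, adds the per-frame drop counts and inter-frame time to the
// rolling accumulators read by GetStats().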
void VideoCapturer::UpdateStats(const CapturedFrame* captured_frame) {
  // Update stats protected from fetches from different thread.
  rtc::CritScope cs(&frame_stats_crit_);

  last_captured_frame_format_.width = captured_frame->width;
  last_captured_frame_format_.height = captured_frame->height;
  // TODO(ronghuawu): Useful to report interval as well?
  last_captured_frame_format_.interval = 0;
  last_captured_frame_format_.fourcc = captured_frame->fourcc;

  double time_now = frame_length_time_reporter_.TimerNow();
  if (previous_frame_time_ != 0.0) {
    adapt_frame_drops_data_.AddSample(adapt_frame_drops_);
    effect_frame_drops_data_.AddSample(effect_frame_drops_);
    frame_time_data_.AddSample(time_now - previous_frame_time_);
  }
  previous_frame_time_ = time_now;
  effect_frame_drops_ = 0;
  adapt_frame_drops_ = 0;
}

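// Copies the max/mean/min/variance of a rolling accumulator into |stats|.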
template<class T>
void VideoCapturer::GetVariableSnapshot(
    const rtc::RollingAccumulator<T>& data,
    VariableInfo<T>* stats) {
  stats->max_val = data.ComputeMax();
  stats->mean = data.ComputeMean();
  stats->min_val = data.ComputeMin();
  stats->variance = data.ComputeVariance();
}

}  // namespace cricket