blob: c5e725c107d992bf0e696e6be988b6bb1b54f570 [file] [log] [blame]
henrike@webrtc.org28e20752013-07-10 00:45:36 +00001// libjingle
2// Copyright 2010 Google Inc.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions are met:
6//
7// 1. Redistributions of source code must retain the above copyright notice,
8// this list of conditions and the following disclaimer.
9// 2. Redistributions in binary form must reproduce the above copyright notice,
10// this list of conditions and the following disclaimer in the documentation
11// and/or other materials provided with the distribution.
12// 3. The name of the author may not be used to endorse or promote products
13// derived from this software without specific prior written permission.
14//
15// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
16// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
17// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
18// EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
19// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
21// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
22// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
23// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
24// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25//
26// Implementation file of class VideoCapturer.
27
28#include "talk/media/base/videocapturer.h"
29
30#include <algorithm>
31
32#if !defined(DISABLE_YUV)
33#include "libyuv/scale_argb.h"
34#endif
35#include "talk/base/common.h"
36#include "talk/base/logging.h"
37#include "talk/base/systeminfo.h"
38#include "talk/media/base/videoprocessor.h"
39
40#if defined(HAVE_WEBRTC_VIDEO)
41#include "talk/media/webrtc/webrtcvideoframe.h"
42#endif // HAVE_WEBRTC_VIDEO
43
44
45namespace cricket {
46
47namespace {
48
49// TODO(thorcarpenter): This is a BIG hack to flush the system with black
50// frames. Frontends should coordinate to update the video state of a muted
51// user. When all frontends to this consider removing the black frame business.
52const int kNumBlackFramesOnMute = 30;
53
54// MessageHandler constants.
55enum {
56 MSG_DO_PAUSE = 0,
57 MSG_DO_UNPAUSE,
58 MSG_STATE_CHANGE
59};
60
61static const int64 kMaxDistance = ~(static_cast<int64>(1) << 63);
62static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
63static const int kDefaultScreencastFps = 5;
64typedef talk_base::TypedMessageData<CaptureState> StateChangeParams;
65
66} // namespace
67
68/////////////////////////////////////////////////////////////////////
69// Implementation of struct CapturedFrame
70/////////////////////////////////////////////////////////////////////
71CapturedFrame::CapturedFrame()
72 : width(0),
73 height(0),
74 fourcc(0),
75 pixel_width(0),
76 pixel_height(0),
77 elapsed_time(0),
78 time_stamp(0),
79 data_size(0),
80 rotation(0),
81 data(NULL) {}
82
83// TODO(fbarchard): Remove this function once lmimediaengine stops using it.
84bool CapturedFrame::GetDataSize(uint32* size) const {
85 if (!size || data_size == CapturedFrame::kUnknownDataSize) {
86 return false;
87 }
88 *size = data_size;
89 return true;
90}
91
92/////////////////////////////////////////////////////////////////////
93// Implementation of class VideoCapturer
94/////////////////////////////////////////////////////////////////////
// Creates a capturer whose messages are posted to the thread that
// constructs it.
VideoCapturer::VideoCapturer() : thread_(talk_base::Thread::Current()) {
  Construct();
}
98
// Creates a capturer whose messages are posted to |thread|. |thread| is not
// owned and is presumably expected to outlive this capturer — confirm with
// callers.
VideoCapturer::VideoCapturer(talk_base::Thread* thread) : thread_(thread) {
  Construct();
}
102
// Shared constructor body: puts every member in its initial state and wires
// SignalFrameCaptured to the internal frame-processing pipeline.
void VideoCapturer::Construct() {
  ClearAspectRatio();
  enable_camera_list_ = false;
  square_pixel_aspect_ratio_ = false;
  capture_state_ = CS_STOPPED;
  // Raw captured frames are funneled through OnFrameCaptured, which crops,
  // scales, adapts and finally emits SignalVideoFrame.
  SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
  scaled_width_ = 0;
  scaled_height_ = 0;
  screencast_max_pixels_ = 0;  // 0 = no screencast pixel cap.
  muted_ = false;
  black_frame_count_down_ = kNumBlackFramesOnMute;
  enable_video_adapter_ = true;
}
116
// Returns the supported formats, already filtered against |max_format_|
// (see UpdateFilteredSupportedFormats()). The pointer stays owned by this
// capturer and is invalidated by SetSupportedFormats().
const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
  return &filtered_supported_formats_;
}
120
121bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
122 CaptureState result = Start(capture_format);
123 const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
124 if (!success) {
125 return false;
126 }
127 if (result == CS_RUNNING) {
128 SetCaptureState(result);
129 }
130 return true;
131}
132
133void VideoCapturer::UpdateAspectRatio(int ratio_w, int ratio_h) {
134 if (ratio_w == 0 || ratio_h == 0) {
135 LOG(LS_WARNING) << "UpdateAspectRatio ignored invalid ratio: "
136 << ratio_w << "x" << ratio_h;
137 return;
138 }
139 ratio_w_ = ratio_w;
140 ratio_h_ = ratio_h;
141}
142
// Resets the desired output aspect ratio. 0x0 appears to act as the "unset"
// value consumed by the cropping logic in OnFrameCaptured() — confirm against
// ComputeCrop()'s handling of zero ratios.
void VideoCapturer::ClearAspectRatio() {
  ratio_w_ = 0;
  ratio_h_ = 0;
}
147
// Override this to have more control of how your device is started/stopped.
// Pausing stops the device but remembers the active capture format so that
// unpausing can restart with the same format. Returns false if the requested
// transition is not legal from the current state.
bool VideoCapturer::Pause(bool pause) {
  if (pause) {
    if (capture_state() == CS_PAUSED) {
      // Already paused; treat as success.
      return true;
    }
    bool is_running = capture_state() == CS_STARTING ||
        capture_state() == CS_RUNNING;
    if (!is_running) {
      LOG(LS_ERROR) << "Cannot pause a stopped camera.";
      return false;
    }
    LOG(LS_INFO) << "Pausing a camera.";
    // Copy the format before Stop(), which presumably clears
    // capture_format_ — the copy is restored right after.
    talk_base::scoped_ptr<VideoFormat> capture_format_when_paused(
        capture_format_ ? new VideoFormat(*capture_format_) : NULL);
    Stop();
    SetCaptureState(CS_PAUSED);
    // If you override this function be sure to restore the capture format
    // after calling Stop().
    SetCaptureFormat(capture_format_when_paused.get());
  } else {  // Unpause.
    if (capture_state() != CS_PAUSED) {
      LOG(LS_WARNING) << "Cannot unpause a camera that hasn't been paused.";
      return false;
    }
    if (!capture_format_) {
      LOG(LS_ERROR) << "Missing capture_format_, cannot unpause a camera.";
      return false;
    }
    if (muted_) {
      // While muted, unpausing is driven by MuteToBlackThenPause() instead.
      LOG(LS_WARNING) << "Camera cannot be unpaused while muted.";
      return false;
    }
    LOG(LS_INFO) << "Unpausing a camera.";
    if (!Start(*capture_format_)) {
      LOG(LS_ERROR) << "Camera failed to start when unpausing.";
      return false;
    }
  }
  return true;
}
189
190bool VideoCapturer::Restart(const VideoFormat& capture_format) {
191 if (!IsRunning()) {
192 return StartCapturing(capture_format);
193 }
194
195 if (GetCaptureFormat() != NULL && *GetCaptureFormat() == capture_format) {
196 // The reqested format is the same; nothing to do.
197 return true;
198 }
199
200 Stop();
201 return StartCapturing(capture_format);
202}
203
// Mutes by letting kNumBlackFramesOnMute frames flow through (they are
// painted black in OnFrameCaptured), after which OnFrameCaptured posts
// MSG_DO_PAUSE to pause the device. Unmuting cancels any pending pause
// message and restarts the camera.
bool VideoCapturer::MuteToBlackThenPause(bool muted) {
  if (muted == IsMuted()) {
    // No state change; report success.
    return true;
  }

  LOG(LS_INFO) << (muted ? "Muting" : "Unmuting") << " this video capturer.";
  muted_ = muted;  // Do this before calling Pause().
  if (muted) {
    // Reset black frame count down.
    black_frame_count_down_ = kNumBlackFramesOnMute;
    // Following frames will be overwritten with black, then the camera will
    // be paused.
    return true;
  }
  // Start the camera. Drop any MSG_DO_PAUSE still queued from the mute so it
  // cannot re-pause the camera after this unpause.
  thread_->Clear(this, MSG_DO_PAUSE);
  return Pause(false);
}
222
// Replaces the device's advertised format list and re-applies the
// |max_format_| filter so GetSupportedFormats() stays consistent.
void VideoCapturer::SetSupportedFormats(
    const std::vector<VideoFormat>& formats) {
  supported_formats_ = formats;
  UpdateFilteredSupportedFormats();
}
228
// Picks the supported format closest to |format| using GetFormatDistance()
// (lower is better) and, when |best_format| is non-NULL, copies the winner's
// width/height/fourcc/interval into it. Returns false when no supported
// format is acceptable (all at kMaxDistance) or none are available.
bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
                                         VideoFormat* best_format) {
  // TODO(fbarchard): Directly support max_format.
  UpdateFilteredSupportedFormats();
  const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();

  if (supported_formats->empty()) {
    return false;
  }
  LOG(LS_INFO) << " Capture Requested " << format.ToString();
  int64 best_distance = kMaxDistance;
  std::vector<VideoFormat>::const_iterator best = supported_formats->end();
  std::vector<VideoFormat>::const_iterator i;
  for (i = supported_formats->begin(); i != supported_formats->end(); ++i) {
    int64 distance = GetFormatDistance(format, *i);
    // TODO(fbarchard): Reduce to LS_VERBOSE if/when camera capture is
    // relatively bug free.
    LOG(LS_INFO) << " Supported " << i->ToString() << " distance " << distance;
    if (distance < best_distance) {
      best_distance = distance;
      best = i;
    }
  }
  // |best| stays at end() only if every candidate scored kMaxDistance
  // (strict < never fired), i.e. no format matched the fourcc constraints.
  if (supported_formats->end() == best) {
    LOG(LS_ERROR) << " No acceptable camera format found";
    return false;
  }

  if (best_format) {
    best_format->width = best->width;
    best_format->height = best->height;
    best_format->fourcc = best->fourcc;
    best_format->interval = best->interval;
    LOG(LS_INFO) << " Best " << best_format->ToString() << " Interval "
                 << best_format->interval << " distance " << best_distance;
  }
  return true;
}
267
// Appends |video_processor| to the chain applied by ApplyProcessors().
// The processor must not already be registered (debug-asserted). The list is
// guarded by |crit_|; the pointer is not owned.
void VideoCapturer::AddVideoProcessor(VideoProcessor* video_processor) {
  talk_base::CritScope cs(&crit_);
  ASSERT(std::find(video_processors_.begin(), video_processors_.end(),
                   video_processor) == video_processors_.end());
  video_processors_.push_back(video_processor);
}
274
275bool VideoCapturer::RemoveVideoProcessor(VideoProcessor* video_processor) {
276 talk_base::CritScope cs(&crit_);
277 VideoProcessors::iterator found = std::find(
278 video_processors_.begin(), video_processors_.end(), video_processor);
279 if (found == video_processors_.end()) {
280 return false;
281 }
282 video_processors_.erase(found);
283 return true;
284}
285
// Caps the advertised formats at |max_format| (by width/height; see
// ShouldFilterFormat()) and rebuilds the filtered list.
void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
  max_format_.reset(new VideoFormat(max_format));
  LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
  UpdateFilteredSupportedFormats();
}
291
292std::string VideoCapturer::ToString(const CapturedFrame* captured_frame) const {
293 std::string fourcc_name = GetFourccName(captured_frame->fourcc) + " ";
294 for (std::string::const_iterator i = fourcc_name.begin();
295 i < fourcc_name.end(); ++i) {
296 // Test character is printable; Avoid isprint() which asserts on negatives.
297 if (*i < 32 || *i >= 127) {
298 fourcc_name = "";
299 break;
300 }
301 }
302
303 std::ostringstream ss;
304 ss << fourcc_name << captured_frame->width << "x" << captured_frame->height
sergeyu@chromium.org4b26e2e2014-01-15 23:15:54 +0000305 << "x" << VideoFormat::IntervalToFpsFloat(captured_frame->elapsed_time);
henrike@webrtc.org28e20752013-07-10 00:45:36 +0000306 return ss.str();
307}
308
// Central frame pipeline, connected to SignalFrameCaptured in Construct().
// Stages: (1) mute bookkeeping — count down black frames, then post
// MSG_DO_PAUSE; (2) optional screencast downscale (ARGB, in place);
// (3) optional vertical scale of YUY2/UYVY to square pixel aspect ratio;
// (4) crop to the desired aspect ratio; (5) wrap as an I420 frame, run the
// video adapter and processors, black out if muted, and emit
// SignalVideoFrame. NOTE(review): stages 2 and 3 mutate |captured_frame|
// through const_cast, so the caller's frame is modified in place.
void VideoCapturer::OnFrameCaptured(VideoCapturer*,
                                    const CapturedFrame* captured_frame) {
  if (muted_) {
    if (black_frame_count_down_ == 0) {
      // Enough black frames have been delivered; pause on the capture thread.
      thread_->Post(this, MSG_DO_PAUSE, NULL);
    } else {
      --black_frame_count_down_;
    }
  }

  // No consumers; skip all conversion work.
  if (SignalVideoFrame.is_empty()) {
    return;
  }
#if defined(HAVE_WEBRTC_VIDEO)
#define VIDEO_FRAME_NAME WebRtcVideoFrame
#endif
#if defined(VIDEO_FRAME_NAME)
#if !defined(DISABLE_YUV)
  if (IsScreencast()) {
    int scaled_width, scaled_height;
    if (screencast_max_pixels_ > 0) {
      // Explicit pixel cap takes precedence over the fps-based heuristic.
      ComputeScaleMaxPixels(captured_frame->width, captured_frame->height,
          screencast_max_pixels_, &scaled_width, &scaled_height);
    } else {
      int desired_screencast_fps = capture_format_.get() ?
          VideoFormat::IntervalToFps(capture_format_->interval) :
          kDefaultScreencastFps;
      ComputeScale(captured_frame->width, captured_frame->height,
                   desired_screencast_fps, &scaled_width, &scaled_height);
    }

    // Only ARGB screencast frames are scaled here; other fourccs pass
    // through untouched.
    if (FOURCC_ARGB == captured_frame->fourcc &&
        (scaled_width != captured_frame->width ||
         scaled_height != captured_frame->height)) {
      if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
        LOG(LS_INFO) << "Scaling Screencast from "
                     << captured_frame->width << "x"
                     << captured_frame->height << " to "
                     << scaled_width << "x" << scaled_height;
        scaled_width_ = scaled_width;
        scaled_height_ = scaled_height;
      }
      CapturedFrame* modified_frame =
          const_cast<CapturedFrame*>(captured_frame);
      // Compute new width such that width * height is less than maximum but
      // maintains original captured frame aspect ratio.
      // Round down width to multiple of 4 so odd width won't round up beyond
      // maximum, and so chroma channel is even width to simplify spatial
      // resampling.
      // In-place downscale: source and destination share |data| (safe only
      // because the result is never larger than the source).
      libyuv::ARGBScale(reinterpret_cast<const uint8*>(captured_frame->data),
                        captured_frame->width * 4, captured_frame->width,
                        captured_frame->height,
                        reinterpret_cast<uint8*>(modified_frame->data),
                        scaled_width * 4, scaled_width, scaled_height,
                        libyuv::kFilterBilinear);
      modified_frame->width = scaled_width;
      modified_frame->height = scaled_height;
      modified_frame->data_size = scaled_width * 4 * scaled_height;
    }
  }

  const int kYuy2Bpp = 2;
  const int kArgbBpp = 4;
  // TODO(fbarchard): Make a helper function to adjust pixels to square.
  // TODO(fbarchard): Hook up experiment to scaling.
  // TODO(fbarchard): Avoid scale and convert if muted.
  // Temporary buffer is scoped here so it will persist until i420_frame.Init()
  // makes a copy of the frame, converting to I420.
  talk_base::scoped_ptr<uint8[]> temp_buffer;
  // YUY2 can be scaled vertically using an ARGB scaler. Aspect ratio is only
  // a problem on OSX. OSX always converts webcams to YUY2 or UYVY.
  bool can_scale =
      FOURCC_YUY2 == CanonicalFourCC(captured_frame->fourcc) ||
      FOURCC_UYVY == CanonicalFourCC(captured_frame->fourcc);

  // If pixels are not square, optionally use vertical scaling to make them
  // square. Square pixels simplify the rest of the pipeline, including
  // effects and rendering.
  if (can_scale && square_pixel_aspect_ratio_ &&
      captured_frame->pixel_width != captured_frame->pixel_height) {
    int scaled_width, scaled_height;
    // modified_frame points to the captured_frame but with const casted away
    // so it can be modified.
    CapturedFrame* modified_frame = const_cast<CapturedFrame*>(captured_frame);
    // Compute the frame size that makes pixels square pixel aspect ratio.
    ComputeScaleToSquarePixels(captured_frame->width, captured_frame->height,
                               captured_frame->pixel_width,
                               captured_frame->pixel_height,
                               &scaled_width, &scaled_height);

    if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
      LOG(LS_INFO) << "Scaling WebCam from "
                   << captured_frame->width << "x"
                   << captured_frame->height << " to "
                   << scaled_width << "x" << scaled_height
                   << " for PAR "
                   << captured_frame->pixel_width << "x"
                   << captured_frame->pixel_height;
      scaled_width_ = scaled_width;
      scaled_height_ = scaled_height;
    }
    const int modified_frame_size = scaled_width * scaled_height * kYuy2Bpp;
    uint8* temp_buffer_data;
    // Pixels are wide and short; Increasing height. Requires temporary buffer.
    if (scaled_height > captured_frame->height) {
      temp_buffer.reset(new uint8[modified_frame_size]);
      temp_buffer_data = temp_buffer.get();
    } else {
      // Pixels are narrow and tall; Decreasing height. Scale will be done
      // in place.
      temp_buffer_data = reinterpret_cast<uint8*>(captured_frame->data);
    }

    // Use ARGBScaler to vertically scale the YUY2 image, adjusting for 16 bpp.
    // Width is passed as YUY2 bytes-per-row divided by ARGB bpp so the ARGB
    // scaler processes the correct number of 4-byte pixel groups.
    libyuv::ARGBScale(reinterpret_cast<const uint8*>(captured_frame->data),
                      captured_frame->width * kYuy2Bpp,  // Stride for YUY2.
                      captured_frame->width * kYuy2Bpp / kArgbBpp,  // Width.
                      abs(captured_frame->height),  // Height.
                      temp_buffer_data,
                      scaled_width * kYuy2Bpp,  // Stride for YUY2.
                      scaled_width * kYuy2Bpp / kArgbBpp,  // Width.
                      abs(scaled_height),  // New height.
                      libyuv::kFilterBilinear);
    modified_frame->width = scaled_width;
    modified_frame->height = scaled_height;
    modified_frame->pixel_width = 1;
    modified_frame->pixel_height = 1;
    modified_frame->data_size = modified_frame_size;
    modified_frame->data = temp_buffer_data;
  }
#endif  // !DISABLE_YUV

  // Size to crop captured frame to. This adjusts the captured frames
  // aspect ratio to match the final view aspect ratio, considering pixel
  // aspect ratio and rotation. The final size may be scaled down by video
  // adapter to better match ratio_w_ x ratio_h_.
  // Note that abs() of frame height is passed in, because source may be
  // inverted, but output will be positive.
  int desired_width = captured_frame->width;
  int desired_height = captured_frame->height;

  // TODO(fbarchard): Improve logic to pad or crop.
  // MJPG can crop vertically, but not horizontally. This logic disables crop.
  // Alternatively we could pad the image with black, or implement a 2 step
  // crop.
  bool can_crop = true;
  if (captured_frame->fourcc == FOURCC_MJPG) {
    float cam_aspect = static_cast<float>(captured_frame->width) /
        static_cast<float>(captured_frame->height);
    float view_aspect = static_cast<float>(ratio_w_) /
        static_cast<float>(ratio_h_);
    can_crop = cam_aspect <= view_aspect;
  }
  if (can_crop && !IsScreencast()) {
    // TODO(ronghuawu): The capturer should always produce the native
    // resolution and the cropping should be done in downstream code.
    ComputeCrop(ratio_w_, ratio_h_, captured_frame->width,
                abs(captured_frame->height), captured_frame->pixel_width,
                captured_frame->pixel_height, captured_frame->rotation,
                &desired_width, &desired_height);
  }

  // Wrap (alias) the captured buffer as an I420 frame without copying.
  VIDEO_FRAME_NAME i420_frame;
  if (!i420_frame.Alias(captured_frame, desired_width, desired_height)) {
    // TODO(fbarchard): LOG more information about captured frame attributes.
    LOG(LS_ERROR) << "Couldn't convert to I420! "
                  << "From " << ToString(captured_frame) << " To "
                  << desired_width << " x " << desired_height;
    return;
  }

  VideoFrame* adapted_frame = &i420_frame;
  if (enable_video_adapter_ && !IsScreencast()) {
    VideoFrame* out_frame = NULL;
    video_adapter_.AdaptFrame(adapted_frame, &out_frame);
    if (!out_frame) {
      return;  // VideoAdapter dropped the frame.
    }
    adapted_frame = out_frame;
  }

  // Processors are skipped entirely while muted; the frame is blacked out
  // instead so downstream still receives (black) video.
  if (!muted_ && !ApplyProcessors(adapted_frame)) {
    // Processor dropped the frame.
    return;
  }
  if (muted_) {
    adapted_frame->SetToBlack();
  }
  SignalVideoFrame(this, adapted_frame);
#endif  // VIDEO_FRAME_NAME
}
500
501void VideoCapturer::SetCaptureState(CaptureState state) {
502 if (state == capture_state_) {
503 // Don't trigger a state changed callback if the state hasn't changed.
504 return;
505 }
506 StateChangeParams* state_params = new StateChangeParams(state);
507 capture_state_ = state;
508 thread_->Post(this, MSG_STATE_CHANGE, state_params);
509}
510
511void VideoCapturer::OnMessage(talk_base::Message* message) {
512 switch (message->message_id) {
513 case MSG_STATE_CHANGE: {
514 talk_base::scoped_ptr<StateChangeParams> p(
515 static_cast<StateChangeParams*>(message->pdata));
516 SignalStateChange(this, p->data());
517 break;
518 }
519 case MSG_DO_PAUSE: {
520 Pause(true);
521 break;
522 }
523 case MSG_DO_UNPAUSE: {
524 Pause(false);
525 break;
526 }
527 default: {
528 ASSERT(false);
529 }
530 }
531}
532
// Get the distance between the supported and desired formats.
// Prioritization is done according to this algorithm:
// 1) Width closeness. If not same, we prefer wider.
// 2) Height closeness. If not same, we prefer higher.
// 3) Framerate closeness. If not same, we prefer faster.
// 4) Compression. If desired format has a specific fourcc, we need exact match;
//    otherwise, we use preference.
// Returns kMaxDistance when no fourcc match is possible; otherwise a value
// whose bit layout packs (from high to low) width delta, height delta, fps
// delta and fourcc preference, so an ordinary < comparison ranks candidates.
int64 VideoCapturer::GetFormatDistance(const VideoFormat& desired,
                                       const VideoFormat& supported) {
  int64 distance = kMaxDistance;

  // Check fourcc.
  uint32 supported_fourcc = CanonicalFourCC(supported.fourcc);
  int64 delta_fourcc = kMaxDistance;
  if (FOURCC_ANY == desired.fourcc) {
    // Any fourcc is OK for the desired. Use preference to find best fourcc.
    std::vector<uint32> preferred_fourccs;
    if (!GetPreferredFourccs(&preferred_fourccs)) {
      return distance;
    }

    for (size_t i = 0; i < preferred_fourccs.size(); ++i) {
      if (supported_fourcc == CanonicalFourCC(preferred_fourccs[i])) {
        // Lower preference index = better match.
        delta_fourcc = i;
#ifdef LINUX
        // For HD avoid YU12 which is a software conversion and has 2 bugs
        // b/7326348 b/6960899. Reenable when fixed.
        if (supported.height >= 720 && (supported_fourcc == FOURCC_YU12 ||
                                        supported_fourcc == FOURCC_YV12)) {
          delta_fourcc += kYU12Penalty;
        }
#endif
        break;
      }
    }
  } else if (supported_fourcc == CanonicalFourCC(desired.fourcc)) {
    delta_fourcc = 0;  // Need exact match.
  }

  if (kMaxDistance == delta_fourcc) {
    // Failed to match fourcc.
    return distance;
  }

  // Check resolution and fps.
  int desired_width = desired.width;
  int desired_height = desired.height;
  int64 delta_w = supported.width - desired_width;
  float supported_fps = VideoFormat::IntervalToFpsFloat(supported.interval);
  float delta_fps =
      supported_fps - VideoFormat::IntervalToFpsFloat(desired.interval);
  // Check height of supported height compared to height we would like it to be.
  // aspect_h is the height the supported width would imply at the desired
  // aspect ratio.
  int64 aspect_h =
      desired_width ? supported.width * desired_height / desired_width
                    : desired_height;
  int64 delta_h = supported.height - aspect_h;

  distance = 0;
  // Set high penalty if the supported format is lower than the desired format.
  // 3x means we would prefer down to down to 3/4, than up to double.
  // But we'd prefer up to double than down to 1/2. This is conservative,
  // strongly avoiding going down in resolution, similar to
  // the old method, but not completely ruling it out in extreme situations.
  // It also ignores framerate, which is often very low at high resolutions.
  // TODO(fbarchard): Improve logic to use weighted factors.
  static const int kDownPenalty = -3;
  if (delta_w < 0) {
    delta_w = delta_w * kDownPenalty;
  }
  if (delta_h < 0) {
    delta_h = delta_h * kDownPenalty;
  }
  // Require camera fps to be at least 80% of what is requested if resolution
  // matches.
  // Require camera fps to be at least 96% of what is requested, or higher,
  // if resolution differs. 96% allows for slight variations in fps. e.g. 29.97
  if (delta_fps < 0) {
    float min_desirable_fps = delta_w ?
        VideoFormat::IntervalToFpsFloat(desired.interval) * 28.f / 30.f :
        VideoFormat::IntervalToFpsFloat(desired.interval) * 23.f / 30.f;
    delta_fps = -delta_fps;
    // Bit 62: far too slow. Bit 15: slightly slow but acceptable.
    if (supported_fps < min_desirable_fps) {
      distance |= static_cast<int64>(1) << 62;
    } else {
      distance |= static_cast<int64>(1) << 15;
    }
  }
  int64 idelta_fps = static_cast<int>(delta_fps);

  // 12 bits for width and height and 8 bits for fps and fourcc.
  distance |=
      (delta_w << 28) | (delta_h << 16) | (idelta_fps << 8) | delta_fourcc;

  return distance;
}
628
629bool VideoCapturer::ApplyProcessors(VideoFrame* video_frame) {
630 bool drop_frame = false;
631 talk_base::CritScope cs(&crit_);
632 for (VideoProcessors::iterator iter = video_processors_.begin();
633 iter != video_processors_.end(); ++iter) {
634 (*iter)->OnFrame(kDummyVideoSsrc, video_frame, &drop_frame);
635 if (drop_frame) {
636 return false;
637 }
638 }
639 return true;
640}
641
642void VideoCapturer::UpdateFilteredSupportedFormats() {
643 filtered_supported_formats_.clear();
644 filtered_supported_formats_ = supported_formats_;
645 if (!max_format_) {
646 return;
647 }
648 std::vector<VideoFormat>::iterator iter = filtered_supported_formats_.begin();
649 while (iter != filtered_supported_formats_.end()) {
650 if (ShouldFilterFormat(*iter)) {
651 iter = filtered_supported_formats_.erase(iter);
652 } else {
653 ++iter;
654 }
655 }
656 if (filtered_supported_formats_.empty()) {
657 // The device only captures at resolutions higher than |max_format_| this
658 // indicates that |max_format_| should be ignored as it is better to capture
659 // at too high a resolution than to not capture at all.
660 filtered_supported_formats_ = supported_formats_;
661 }
662}
663
664bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
665 if (!enable_camera_list_) {
666 return false;
667 }
668 return format.width > max_format_->width ||
669 format.height > max_format_->height;
670}
671
672} // namespace cricket