/*
 * libjingle
 * Copyright 2014 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifdef HAVE_WEBRTC_VIDEO
#include "talk/media/webrtc/webrtcvideoengine2.h"

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <math.h>

#include <string>

#include "libyuv/convert_from.h"
#include "talk/base/buffer.h"
#include "talk/base/logging.h"
#include "talk/base/stringutils.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videorenderer.h"
#include "talk/media/webrtc/webrtcvideocapturer.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "talk/media/webrtc/webrtcvoiceengine.h"
#include "webrtc/call.h"
// TODO(pbos): Move codecs out of modules (webrtc:3070).
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"

#define UNIMPLEMENTED                                                 \
  LOG(LS_ERROR) << "Call to unimplemented function " << __FUNCTION__; \
  ASSERT(false)

namespace cricket {

static const int kCpuMonitorPeriodMs = 2000;  // 2 seconds.

// This constant is really an on/off switch; a lower-level configurable NACK
// history duration hasn't been implemented.
static const int kNackHistoryMs = 1000;

static const int kDefaultFramerate = 30;
static const int kMinVideoBitrate = 50;
static const int kMaxVideoBitrate = 2000;

static const int kVideoMtu = 1200;
static const int kVideoRtpBufferSize = 65536;

static const char kVp8PayloadName[] = "VP8";

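// Default SSRC used for RTCP receiver reports until a send stream with a real
// local SSRC has been added; see WebRtcVideoChannel2::AddSendStream.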
static const int kDefaultRtcpReceiverReportSsrc = 1;

struct VideoCodecPref {
  int payload_type;
  const char* name;
  int rtx_payload_type;
} kDefaultVideoCodecPref = {100, kVp8PayloadName, 96};

VideoCodecPref kRedPref = {116, kRedCodecName, -1};
VideoCodecPref kUlpfecPref = {117, kUlpfecCodecName, -1};

// The formats are sorted in descending order of width. We use the order to
// find the next format for CPU and bandwidth adaptation.
const VideoFormatPod kDefaultVideoFormat = {
    640, 400, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY};
const VideoFormatPod kVideoFormats[] = {
    {1280, 800, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {1280, 720, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {960, 600, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {960, 540, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    kDefaultVideoFormat,
    {640, 360, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {640, 480, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {480, 300, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {480, 270, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {480, 360, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {320, 200, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {320, 180, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {320, 240, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {240, 150, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {240, 135, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {240, 180, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {160, 100, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {160, 90, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY},
    {160, 120, FPS_TO_INTERVAL(kDefaultFramerate), FOURCC_ANY}, };

static bool FindFirstMatchingCodec(const std::vector<VideoCodec>& codecs,
                                   const VideoCodec& requested_codec,
                                   VideoCodec* matching_codec) {
  for (size_t i = 0; i < codecs.size(); ++i) {
    if (requested_codec.Matches(codecs[i])) {
      *matching_codec = codecs[i];
      return true;
    }
  }
  return false;
}
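// Finds a format in kVideoFormats that fits within |max_width| x |max_height|
// and whose aspect ratio is as close as possible to
// |aspect_width|:|aspect_height|. Returns false if no listed format fits.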
static bool FindBestVideoFormat(int max_width,
                                int max_height,
                                int aspect_width,
                                int aspect_height,
                                VideoFormat* video_format) {
  assert(max_width > 0);
  assert(max_height > 0);
  assert(aspect_width > 0);
  assert(aspect_height > 0);
  VideoFormat best_format;
  for (int i = 0; i < ARRAY_SIZE(kVideoFormats); ++i) {
    const VideoFormat format(kVideoFormats[i]);

    // Skip any format that is larger than the local or remote maximums, or
    // smaller than the current best match
    if (format.width > max_width || format.height > max_height ||
        (format.width < best_format.width &&
         format.height < best_format.height)) {
      continue;
    }

    // If we don't have any matches yet, this is the best so far.
    if (best_format.width == 0) {
      best_format = format;
      continue;
    }

    // Prefer closer aspect ratios i.e:
    // |format| aspect - requested aspect <
    // |best_format| aspect - requested aspect
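    // The comparison below is cross-multiplied so that it stays in integer
    // arithmetic instead of dividing the aspect ratios.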
    if (abs(format.width * aspect_height * best_format.height -
            aspect_width * format.height * best_format.height) <
        abs(best_format.width * aspect_height * format.height -
            aspect_width * format.height * best_format.height)) {
      best_format = format;
    }
  }
  if (best_format.width != 0) {
    *video_format = best_format;
    return true;
  }
  return false;
}

static VideoCodec DefaultVideoCodec() {
  VideoCodec default_codec(kDefaultVideoCodecPref.payload_type,
                           kDefaultVideoCodecPref.name,
                           kDefaultVideoFormat.width,
                           kDefaultVideoFormat.height,
                           kDefaultFramerate,
                           0);
  return default_codec;
}

static VideoCodec DefaultRedCodec() {
  return VideoCodec(kRedPref.payload_type, kRedPref.name, 0, 0, 0, 0);
}

static VideoCodec DefaultUlpfecCodec() {
  return VideoCodec(kUlpfecPref.payload_type, kUlpfecPref.name, 0, 0, 0, 0);
}

static std::vector<VideoCodec> DefaultVideoCodecs() {
  std::vector<VideoCodec> codecs;
  codecs.push_back(DefaultVideoCodec());
  codecs.push_back(DefaultRedCodec());
  codecs.push_back(DefaultUlpfecCodec());
  if (kDefaultVideoCodecPref.rtx_payload_type != -1) {
    codecs.push_back(
        VideoCodec::CreateRtxCodec(kDefaultVideoCodecPref.rtx_payload_type,
                                   kDefaultVideoCodecPref.payload_type));
  }
  return codecs;
}

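// Encoder factory for the built-in VP8 software encoder. Only a single stream
// is supported; bitrate limits and max QP are taken from the codec parameters
// when present, otherwise built-in defaults are used.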
class DefaultVideoEncoderFactory : public WebRtcVideoEncoderFactory2 {
 public:
  virtual bool CreateEncoderSettings(
      webrtc::VideoSendStream::Config::EncoderSettings* encoder_settings,
      const VideoOptions& options,
      const VideoCodec& codec,
      size_t num_streams) OVERRIDE {
    if (num_streams != 1) {
      LOG(LS_ERROR) << "Unsupported number of streams: " << num_streams;
      return false;
    }
    if (!SupportsCodec(codec)) {
      LOG(LS_ERROR) << "Can't create encoder settings for unsupported codec: '"
                    << codec.name << "'";
      return false;
    }

    *encoder_settings = webrtc::VideoSendStream::Config::EncoderSettings();

    webrtc::VideoStream stream;
    stream.width = codec.width;
    stream.height = codec.height;
    stream.max_framerate =
        codec.framerate != 0 ? codec.framerate : kDefaultFramerate;

    int min_bitrate = kMinVideoBitrate;
    codec.GetParam(kCodecParamMinBitrate, &min_bitrate);
    int max_bitrate = kMaxVideoBitrate;
    codec.GetParam(kCodecParamMaxBitrate, &max_bitrate);
    stream.min_bitrate_bps = min_bitrate * 1000;
    stream.target_bitrate_bps = stream.max_bitrate_bps = max_bitrate * 1000;

    int max_qp = 56;
    codec.GetParam(kCodecParamMaxQuantization, &max_qp);
    stream.max_qp = max_qp;
    encoder_settings->streams.push_back(stream);

    encoder_settings->encoder = webrtc::VP8Encoder::Create();
    encoder_settings->payload_type = kDefaultVideoCodecPref.payload_type;
    encoder_settings->payload_name = kDefaultVideoCodecPref.name;

    return true;
  }

  virtual bool SupportsCodec(const VideoCodec& codec) OVERRIDE {
    return _stricmp(codec.name.c_str(), kVp8PayloadName) == 0;
  }
} default_encoder_factory;

WebRtcVideoEngine2::WebRtcVideoEngine2() {
  // Construct without a factory or voice engine.
  Construct(NULL, NULL, new talk_base::CpuMonitor(NULL));
}

WebRtcVideoEngine2::WebRtcVideoEngine2(
    WebRtcVideoChannelFactory* channel_factory) {
  // Construct without a voice engine.
  Construct(channel_factory, NULL, new talk_base::CpuMonitor(NULL));
}

void WebRtcVideoEngine2::Construct(WebRtcVideoChannelFactory* channel_factory,
                                   WebRtcVoiceEngine* voice_engine,
                                   talk_base::CpuMonitor* cpu_monitor) {
  LOG(LS_INFO) << "WebRtcVideoEngine2::WebRtcVideoEngine2";
  worker_thread_ = NULL;
  voice_engine_ = voice_engine;
  initialized_ = false;
  capture_started_ = false;
  cpu_monitor_.reset(cpu_monitor);
  channel_factory_ = channel_factory;

  video_codecs_ = DefaultVideoCodecs();
  default_codec_format_ = VideoFormat(kDefaultVideoFormat);
}

WebRtcVideoEngine2::~WebRtcVideoEngine2() {
  LOG(LS_INFO) << "WebRtcVideoEngine2::~WebRtcVideoEngine2";

  if (initialized_) {
    Terminate();
  }
}

bool WebRtcVideoEngine2::Init(talk_base::Thread* worker_thread) {
  LOG(LS_INFO) << "WebRtcVideoEngine2::Init";
  worker_thread_ = worker_thread;
  ASSERT(worker_thread_ != NULL);

  cpu_monitor_->set_thread(worker_thread_);
  if (!cpu_monitor_->Start(kCpuMonitorPeriodMs)) {
    LOG(LS_ERROR) << "Failed to start CPU monitor.";
    cpu_monitor_.reset();
  }

  initialized_ = true;
  return true;
}

void WebRtcVideoEngine2::Terminate() {
  LOG(LS_INFO) << "WebRtcVideoEngine2::Terminate";

  cpu_monitor_->Stop();

  initialized_ = false;
}

int WebRtcVideoEngine2::GetCapabilities() { return VIDEO_RECV | VIDEO_SEND; }

bool WebRtcVideoEngine2::SetOptions(const VideoOptions& options) {
  // TODO(pbos): Do we need this? This is a no-op in the existing
  // WebRtcVideoEngine implementation.
  LOG(LS_VERBOSE) << "SetOptions: " << options.ToString();
  // options_ = options;
  return true;
}

bool WebRtcVideoEngine2::SetDefaultEncoderConfig(
    const VideoEncoderConfig& config) {
  // TODO(pbos): Implement. Should be covered by corresponding unit tests.
  LOG(LS_VERBOSE) << "SetDefaultEncoderConfig()";
  return true;
}

VideoEncoderConfig WebRtcVideoEngine2::GetDefaultEncoderConfig() const {
  return VideoEncoderConfig(DefaultVideoCodec());
}

WebRtcVideoChannel2* WebRtcVideoEngine2::CreateChannel(
    VoiceMediaChannel* voice_channel) {
  LOG(LS_INFO) << "CreateChannel: "
               << (voice_channel != NULL ? "With" : "Without")
               << " voice channel.";
  WebRtcVideoChannel2* channel =
      channel_factory_ != NULL
          ? channel_factory_->Create(this, voice_channel)
          : new WebRtcVideoChannel2(
                this, voice_channel, GetDefaultVideoEncoderFactory());
  if (!channel->Init()) {
    delete channel;
    return NULL;
  }
  return channel;
}

const std::vector<VideoCodec>& WebRtcVideoEngine2::codecs() const {
  return video_codecs_;
}

const std::vector<RtpHeaderExtension>&
WebRtcVideoEngine2::rtp_header_extensions() const {
  return rtp_header_extensions_;
}

void WebRtcVideoEngine2::SetLogging(int min_sev, const char* filter) {
  // TODO(pbos): Set up logging.
  LOG(LS_VERBOSE) << "SetLogging: " << min_sev << '"' << filter << '"';
  // if min_sev == -1, we keep the current log level.
  if (min_sev < 0) {
    assert(min_sev == -1);
    return;
  }
}

bool WebRtcVideoEngine2::EnableTimedRender() {
  // TODO(pbos): Figure out whether this can be removed.
  return true;
}

bool WebRtcVideoEngine2::SetLocalRenderer(VideoRenderer* renderer) {
  // TODO(pbos): Implement or remove. Unclear which stream should be rendered
  // locally even.
  return true;
}

// Checks to see whether we comprehend and could receive a particular codec
bool WebRtcVideoEngine2::FindCodec(const VideoCodec& in) {
  // TODO(pbos): Probe the encoder factory to figure out whether the codec is
  // supported. Add a corresponding test that fails with this code (which
  // doesn't ask the factory).
  for (int i = 0; i < ARRAY_SIZE(kVideoFormats); ++i) {
    const VideoFormat fmt(kVideoFormats[i]);
    if ((in.width != 0 || in.height != 0) &&
        (fmt.width != in.width || fmt.height != in.height)) {
      continue;
    }
    for (size_t j = 0; j < video_codecs_.size(); ++j) {
      VideoCodec codec(video_codecs_[j].id, video_codecs_[j].name, 0, 0, 0, 0);
      if (codec.Matches(in)) {
        return true;
      }
    }
  }
  return false;
}

// Tells whether the |requested| codec can be transmitted or not. If it can be
// transmitted |out| is set with the best settings supported. Aspect ratio will
// be set as close to |current|'s as possible. If not set |requested|'s
// dimensions will be used for aspect ratio matching.
bool WebRtcVideoEngine2::CanSendCodec(const VideoCodec& requested,
                                      const VideoCodec& current,
                                      VideoCodec* out) {
  assert(out != NULL);
  // TODO(pbos): Implement.

  if (requested.width != requested.height &&
      (requested.height == 0 || requested.width == 0)) {
    // 0xn and nx0 are invalid resolutions.
    return false;
  }

  VideoCodec matching_codec;
  if (!FindFirstMatchingCodec(video_codecs_, requested, &matching_codec)) {
    // Codec not supported.
    return false;
  }

  // Pick the best quality that is within their and our bounds and has the
  // correct aspect ratio.
  VideoFormat format;
  if (requested.width == 0 && requested.height == 0) {
    // Special case with resolution 0. The channel should not send frames.
  } else {
    int max_width = talk_base::_min(requested.width, matching_codec.width);
    int max_height = talk_base::_min(requested.height, matching_codec.height);
    int aspect_width = max_width;
    int aspect_height = max_height;
    if (current.width > 0 && current.height > 0) {
      aspect_width = current.width;
      aspect_height = current.height;
    }
    if (!FindBestVideoFormat(
            max_width, max_height, aspect_width, aspect_height, &format)) {
      return false;
    }
  }

  out->id = requested.id;
  out->name = requested.name;
  out->preference = requested.preference;
  out->params = requested.params;
  out->framerate =
      talk_base::_min(requested.framerate, matching_codec.framerate);
  out->width = format.width;
  out->height = format.height;
  out->params = requested.params;
  out->feedback_params = requested.feedback_params;
  return true;
}

bool WebRtcVideoEngine2::SetVoiceEngine(WebRtcVoiceEngine* voice_engine) {
  if (initialized_) {
    LOG(LS_WARNING) << "SetVoiceEngine can not be called after Init";
    return false;
  }
  voice_engine_ = voice_engine;
  return true;
}

// Ignore spammy trace messages, mostly from the stats API when we haven't
// gotten RTCP info yet from the remote side.
bool WebRtcVideoEngine2::ShouldIgnoreTrace(const std::string& trace) {
  static const char* const kTracesToIgnore[] = {NULL};
  for (const char* const* p = kTracesToIgnore; *p; ++p) {
    if (trace.find(*p) == 0) {
      return true;
    }
  }
  return false;
}

WebRtcVideoEncoderFactory2* WebRtcVideoEngine2::GetDefaultVideoEncoderFactory()
    const {
  return &default_encoder_factory;
}

// Thin map between cricket::VideoFrame and an existing webrtc::I420VideoFrame
// to avoid having to copy the rendered VideoFrame prematurely.
// This implementation is only safe to use in a const context and should never
// be written to.
class WebRtcVideoRenderFrame : public cricket::VideoFrame {
 public:
  explicit WebRtcVideoRenderFrame(const webrtc::I420VideoFrame* frame)
      : frame_(frame) {}

  virtual bool InitToBlack(int w,
                           int h,
                           size_t pixel_width,
                           size_t pixel_height,
                           int64 elapsed_time,
                           int64 time_stamp) OVERRIDE {
    UNIMPLEMENTED;
    return false;
  }

  virtual bool Reset(uint32 fourcc,
                     int w,
                     int h,
                     int dw,
                     int dh,
                     uint8* sample,
                     size_t sample_size,
                     size_t pixel_width,
                     size_t pixel_height,
                     int64 elapsed_time,
                     int64 time_stamp,
                     int rotation) OVERRIDE {
    UNIMPLEMENTED;
    return false;
  }

  virtual size_t GetWidth() const OVERRIDE {
    return static_cast<size_t>(frame_->width());
  }
  virtual size_t GetHeight() const OVERRIDE {
    return static_cast<size_t>(frame_->height());
  }

  virtual const uint8* GetYPlane() const OVERRIDE {
    return frame_->buffer(webrtc::kYPlane);
  }
  virtual const uint8* GetUPlane() const OVERRIDE {
    return frame_->buffer(webrtc::kUPlane);
  }
  virtual const uint8* GetVPlane() const OVERRIDE {
    return frame_->buffer(webrtc::kVPlane);
  }

  virtual uint8* GetYPlane() OVERRIDE {
    UNIMPLEMENTED;
    return NULL;
  }
  virtual uint8* GetUPlane() OVERRIDE {
    UNIMPLEMENTED;
    return NULL;
  }
  virtual uint8* GetVPlane() OVERRIDE {
    UNIMPLEMENTED;
    return NULL;
  }

  virtual int32 GetYPitch() const OVERRIDE {
    return frame_->stride(webrtc::kYPlane);
  }
  virtual int32 GetUPitch() const OVERRIDE {
    return frame_->stride(webrtc::kUPlane);
  }
  virtual int32 GetVPitch() const OVERRIDE {
    return frame_->stride(webrtc::kVPlane);
  }

  virtual void* GetNativeHandle() const OVERRIDE { return NULL; }

  virtual size_t GetPixelWidth() const OVERRIDE { return 1; }
  virtual size_t GetPixelHeight() const OVERRIDE { return 1; }

  virtual int64 GetElapsedTime() const OVERRIDE {
    // Convert millisecond render time to ns timestamp.
    return frame_->render_time_ms() * talk_base::kNumNanosecsPerMillisec;
  }
  virtual int64 GetTimeStamp() const OVERRIDE {
    // Convert 90K rtp timestamp to ns timestamp.
    return (frame_->timestamp() / 90) * talk_base::kNumNanosecsPerMillisec;
  }
  virtual void SetElapsedTime(int64 elapsed_time) OVERRIDE { UNIMPLEMENTED; }
  virtual void SetTimeStamp(int64 time_stamp) OVERRIDE { UNIMPLEMENTED; }

  virtual int GetRotation() const OVERRIDE {
    UNIMPLEMENTED;
    return ROTATION_0;
  }

  virtual VideoFrame* Copy() const OVERRIDE {
    UNIMPLEMENTED;
    return NULL;
  }

  virtual bool MakeExclusive() OVERRIDE {
    UNIMPLEMENTED;
    return false;
  }

  virtual size_t CopyToBuffer(uint8* buffer, size_t size) const {
    UNIMPLEMENTED;
    return 0;
  }

  // TODO(fbarchard): Refactor into base class and share with LMI
  virtual size_t ConvertToRgbBuffer(uint32 to_fourcc,
                                    uint8* buffer,
                                    size_t size,
                                    int stride_rgb) const OVERRIDE {
    size_t width = GetWidth();
    size_t height = GetHeight();
    size_t needed = (stride_rgb >= 0 ? stride_rgb : -stride_rgb) * height;
    if (size < needed) {
      LOG(LS_WARNING) << "RGB buffer is not large enough";
      return needed;
    }

    if (libyuv::ConvertFromI420(GetYPlane(),
                                GetYPitch(),
                                GetUPlane(),
                                GetUPitch(),
                                GetVPlane(),
                                GetVPitch(),
                                buffer,
                                stride_rgb,
                                static_cast<int>(width),
                                static_cast<int>(height),
                                to_fourcc)) {
      LOG(LS_ERROR) << "RGB type not supported: " << to_fourcc;
      return 0;  // 0 indicates error
    }
    return needed;
  }

 protected:
  virtual VideoFrame* CreateEmptyFrame(int w,
                                       int h,
                                       size_t pixel_width,
                                       size_t pixel_height,
                                       int64 elapsed_time,
                                       int64 time_stamp) const OVERRIDE {
    // TODO(pbos): Remove WebRtcVideoFrame dependency, and have a non-const
    // version of I420VideoFrame wrapped.
    WebRtcVideoFrame* frame = new WebRtcVideoFrame();
    frame->InitToBlack(
        w, h, pixel_width, pixel_height, elapsed_time, time_stamp);
    return frame;
  }

 private:
  const webrtc::I420VideoFrame* const frame_;
};

WebRtcVideoRenderer::WebRtcVideoRenderer()
    : last_width_(-1), last_height_(-1), renderer_(NULL) {}

void WebRtcVideoRenderer::RenderFrame(const webrtc::I420VideoFrame& frame,
                                      int time_to_render_ms) {
  talk_base::CritScope crit(&lock_);
  if (renderer_ == NULL) {
    LOG(LS_WARNING) << "VideoReceiveStream not connected to a VideoRenderer.";
    return;
  }

  if (frame.width() != last_width_ || frame.height() != last_height_) {
    SetSize(frame.width(), frame.height());
  }

  LOG(LS_VERBOSE) << "RenderFrame: (" << frame.width() << "x" << frame.height()
                  << ")";

  const WebRtcVideoRenderFrame render_frame(&frame);
  renderer_->RenderFrame(&render_frame);
}

void WebRtcVideoRenderer::SetRenderer(cricket::VideoRenderer* renderer) {
  talk_base::CritScope crit(&lock_);
  renderer_ = renderer;
  if (renderer_ != NULL && last_width_ != -1) {
    SetSize(last_width_, last_height_);
  }
}

VideoRenderer* WebRtcVideoRenderer::GetRenderer() {
  talk_base::CritScope crit(&lock_);
  return renderer_;
}

void WebRtcVideoRenderer::SetSize(int width, int height) {
  talk_base::CritScope crit(&lock_);
  if (!renderer_->SetSize(width, height, 0)) {
    LOG(LS_ERROR) << "Could not set renderer size.";
  }
  last_width_ = width;
  last_height_ = height;
}

// WebRtcVideoChannel2

WebRtcVideoChannel2::WebRtcVideoChannel2(
    WebRtcVideoEngine2* engine,
    VoiceMediaChannel* voice_channel,
    WebRtcVideoEncoderFactory2* encoder_factory)
    : encoder_factory_(encoder_factory) {
  // TODO(pbos): Connect the video and audio with |voice_channel|.
  webrtc::Call::Config config(this);
  Construct(webrtc::Call::Create(config), engine);
}

WebRtcVideoChannel2::WebRtcVideoChannel2(
    webrtc::Call* call,
    WebRtcVideoEngine2* engine,
    WebRtcVideoEncoderFactory2* encoder_factory)
    : encoder_factory_(encoder_factory) {
  Construct(call, engine);
}

void WebRtcVideoChannel2::Construct(webrtc::Call* call,
                                    WebRtcVideoEngine2* engine) {
  rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc;
  sending_ = false;
  call_.reset(call);
  default_renderer_ = NULL;
  default_send_ssrc_ = 0;
  default_recv_ssrc_ = 0;
}

WebRtcVideoChannel2::~WebRtcVideoChannel2() {
  for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
           send_streams_.begin();
       it != send_streams_.end();
       ++it) {
    delete it->second;
  }

  for (std::map<uint32, webrtc::VideoReceiveStream*>::iterator it =
           receive_streams_.begin();
       it != receive_streams_.end();
       ++it) {
    assert(it->second != NULL);
    call_->DestroyVideoReceiveStream(it->second);
  }

  for (std::map<uint32, WebRtcVideoRenderer*>::iterator it = renderers_.begin();
       it != renderers_.end();
       ++it) {
    assert(it->second != NULL);
    delete it->second;
  }
}

bool WebRtcVideoChannel2::Init() { return true; }

namespace {

static bool ValidateCodecFormats(const std::vector<VideoCodec>& codecs) {
  for (size_t i = 0; i < codecs.size(); ++i) {
    if (!codecs[i].ValidateCodecFormat()) {
      return false;
    }
  }
  return true;
}

static std::string CodecVectorToString(const std::vector<VideoCodec>& codecs) {
  std::stringstream out;
  out << '{';
  for (size_t i = 0; i < codecs.size(); ++i) {
    out << codecs[i].ToString();
    if (i != codecs.size() - 1) {
      out << ", ";
    }
  }
  out << '}';
  return out.str();
}

}  // namespace

bool WebRtcVideoChannel2::SetRecvCodecs(const std::vector<VideoCodec>& codecs) {
  // TODO(pbos): Must these receive codecs propagate to existing receive
  // streams?
  LOG(LS_INFO) << "SetRecvCodecs: " << CodecVectorToString(codecs);
  if (!ValidateCodecFormats(codecs)) {
    return false;
  }

  const std::vector<VideoCodecSettings> mapped_codecs = MapCodecs(codecs);
  if (mapped_codecs.empty()) {
    LOG(LS_ERROR) << "SetRecvCodecs called without video codec payloads.";
    return false;
  }

  // TODO(pbos): Add a decoder factory which controls supported codecs.
  // Blocked on webrtc:2854.
  for (size_t i = 0; i < mapped_codecs.size(); ++i) {
    if (_stricmp(mapped_codecs[i].codec.name.c_str(), kVp8PayloadName) != 0) {
      LOG(LS_ERROR) << "SetRecvCodecs called with unsupported codec: '"
                    << mapped_codecs[i].codec.name << "'";
      return false;
    }
  }

  recv_codecs_ = mapped_codecs;
  return true;
}

bool WebRtcVideoChannel2::SetSendCodecs(const std::vector<VideoCodec>& codecs) {
  LOG(LS_INFO) << "SetSendCodecs: " << CodecVectorToString(codecs);
  if (!ValidateCodecFormats(codecs)) {
    return false;
  }

  const std::vector<VideoCodecSettings> supported_codecs =
      FilterSupportedCodecs(MapCodecs(codecs));

  if (supported_codecs.empty()) {
    LOG(LS_ERROR) << "No video codecs supported by encoder factory.";
    return false;
  }

  send_codec_.Set(supported_codecs.front());
  LOG(LS_INFO) << "Using codec: " << supported_codecs.front().codec.ToString();

  SetCodecForAllSendStreams(supported_codecs.front());

  return true;
}

bool WebRtcVideoChannel2::GetSendCodec(VideoCodec* codec) {
  VideoCodecSettings codec_settings;
  if (!send_codec_.Get(&codec_settings)) {
    LOG(LS_VERBOSE) << "GetSendCodec: No send codec set.";
    return false;
  }
  *codec = codec_settings.codec;
  return true;
}

bool WebRtcVideoChannel2::SetSendStreamFormat(uint32 ssrc,
                                              const VideoFormat& format) {
  LOG(LS_VERBOSE) << "SetSendStreamFormat:" << ssrc << " -> "
                  << format.ToString();
  if (send_streams_.find(ssrc) == send_streams_.end()) {
    return false;
  }
  return send_streams_[ssrc]->SetVideoFormat(format);
}

bool WebRtcVideoChannel2::SetRender(bool render) {
  // TODO(pbos): Implement. Or refactor away as it shouldn't be needed.
  LOG(LS_VERBOSE) << "SetRender: " << (render ? "true" : "false");
  return true;
}

bool WebRtcVideoChannel2::SetSend(bool send) {
  LOG(LS_VERBOSE) << "SetSend: " << (send ? "true" : "false");
  if (send && !send_codec_.IsSet()) {
    LOG(LS_ERROR) << "SetSend(true) called before setting codec.";
    return false;
  }
  if (send) {
    StartAllSendStreams();
  } else {
    StopAllSendStreams();
  }
  sending_ = send;
  return true;
}

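// Copies the SSRCs from |sp| into |config|. Streams without SSRC groups use
// sp.ssrcs directly; simulcast ("SIM") groups use the group's SSRCs plus any
// corresponding RTX ("FID") SSRCs. Standalone FEC groups and simulcast groups
// that are only partially covered by RTX are rejected.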
static bool ConfigureSendSsrcs(webrtc::VideoSendStream::Config* config,
                               const StreamParams& sp) {
  if (!sp.has_ssrc_groups()) {
    config->rtp.ssrcs = sp.ssrcs;
    return true;
  }

  if (sp.get_ssrc_group(kFecSsrcGroupSemantics) != NULL) {
    LOG(LS_ERROR) << "Standalone FEC SSRCs not supported.";
    return false;
  }

  const SsrcGroup* sim_group = sp.get_ssrc_group(kSimSsrcGroupSemantics);
  if (sim_group == NULL) {
    LOG(LS_ERROR) << "Grouped StreamParams without regular SSRC group: "
                  << sp.ToString();
    return false;
  }

  // Map RTX SSRCs.
  std::vector<uint32_t> rtx_ssrcs;
  for (size_t i = 0; i < sim_group->ssrcs.size(); ++i) {
    uint32_t rtx_ssrc;
    if (!sp.GetFidSsrc(sim_group->ssrcs[i], &rtx_ssrc)) {
      continue;
    }
    rtx_ssrcs.push_back(rtx_ssrc);
  }
  if (!rtx_ssrcs.empty() && sim_group->ssrcs.size() != rtx_ssrcs.size()) {
    LOG(LS_ERROR)
        << "RTX SSRCs exist, but don't cover all SSRCs (unsupported): "
        << sp.ToString();
    return false;
  }
  config->rtp.rtx.ssrcs = rtx_ssrcs;
  config->rtp.ssrcs = sim_group->ssrcs;
  return true;
}

bool WebRtcVideoChannel2::AddSendStream(const StreamParams& sp) {
  LOG(LS_INFO) << "AddSendStream: " << sp.ToString();
  if (sp.ssrcs.empty()) {
    LOG(LS_ERROR) << "No SSRCs in stream parameters.";
    return false;
  }

  uint32 ssrc = sp.first_ssrc();
  assert(ssrc != 0);
  // TODO(pbos): Make sure none of sp.ssrcs are used, not just the identifying
  // ssrc.
  if (send_streams_.find(ssrc) != send_streams_.end()) {
    LOG(LS_ERROR) << "Send stream with ssrc '" << ssrc << "' already exists.";
    return false;
  }

  webrtc::VideoSendStream::Config config = call_->GetDefaultSendConfig();

  if (!ConfigureSendSsrcs(&config, sp)) {
    return false;
  }

  VideoCodecSettings codec_settings;
  if (!send_codec_.Get(&codec_settings)) {
    // TODO(pbos): Set up a temporary fake encoder for VideoSendStream instead
    // of setting default codecs not to break CreateEncoderSettings.
    SetSendCodecs(DefaultVideoCodecs());
    assert(send_codec_.IsSet());
    send_codec_.Get(&codec_settings);
    // This is only to bring up defaults to make VideoSendStream setup easier
    // and avoid complexity. We still don't want to allow sending with the
    // default codec.
    send_codec_.Clear();
  }

  // CreateEncoderSettings will allocate a suitable VideoEncoder instance
  // matching current settings.
  if (!encoder_factory_->CreateEncoderSettings(&config.encoder_settings,
                                               options_,
                                               codec_settings.codec,
                                               config.rtp.ssrcs.size())) {
    LOG(LS_ERROR) << "Failed to create suitable encoder settings.";
    return false;
  }

  config.rtp.c_name = sp.cname;
  config.rtp.fec = codec_settings.fec;
  if (!config.rtp.rtx.ssrcs.empty()) {
    config.rtp.rtx.payload_type = codec_settings.rtx_payload_type;
  }

  config.rtp.nack.rtp_history_ms = kNackHistoryMs;
  config.rtp.max_packet_size = kVideoMtu;

  WebRtcVideoSendStream* stream =
      new WebRtcVideoSendStream(call_.get(), config, encoder_factory_);
  send_streams_[ssrc] = stream;

  if (rtcp_receiver_report_ssrc_ == kDefaultRtcpReceiverReportSsrc) {
    rtcp_receiver_report_ssrc_ = ssrc;
  }
  if (default_send_ssrc_ == 0) {
    default_send_ssrc_ = ssrc;
  }
  if (sending_) {
    stream->Start();
  }

  return true;
}

bool WebRtcVideoChannel2::RemoveSendStream(uint32 ssrc) {
  LOG(LS_INFO) << "RemoveSendStream: " << ssrc;

  if (ssrc == 0) {
    if (default_send_ssrc_ == 0) {
      LOG(LS_ERROR) << "No default send stream active.";
      return false;
    }

    LOG(LS_VERBOSE) << "Removing default stream: " << default_send_ssrc_;
    ssrc = default_send_ssrc_;
  }

  std::map<uint32, WebRtcVideoSendStream*>::iterator it =
      send_streams_.find(ssrc);
  if (it == send_streams_.end()) {
    return false;
  }

  delete it->second;
  send_streams_.erase(it);

  if (ssrc == default_send_ssrc_) {
    default_send_ssrc_ = 0;
  }

  return true;
}

bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp) {
  LOG(LS_INFO) << "AddRecvStream: " << sp.ToString();
  assert(sp.ssrcs.size() > 0);

  uint32 ssrc = sp.first_ssrc();
  assert(ssrc != 0);  // TODO(pbos): Is this ever valid?
  if (default_recv_ssrc_ == 0) {
    default_recv_ssrc_ = ssrc;
  }

  // TODO(pbos): Check if any of the SSRCs overlap.
  if (receive_streams_.find(ssrc) != receive_streams_.end()) {
    LOG(LS_ERROR) << "Receive stream for SSRC " << ssrc << " already exists.";
    return false;
  }

  webrtc::VideoReceiveStream::Config config = call_->GetDefaultReceiveConfig();
  config.rtp.remote_ssrc = ssrc;
  config.rtp.local_ssrc = rtcp_receiver_report_ssrc_;
  uint32 rtx_ssrc = 0;
  if (sp.GetFidSsrc(ssrc, &rtx_ssrc)) {
    // TODO(pbos): Right now VideoReceiveStream accepts any RTX payload; this
    // should use the actual codec payloads that may be received
    // (for each receive payload, set rtx[payload].ssrc = rtx_ssrc).
    config.rtp.rtx[0].ssrc = rtx_ssrc;
  }

  config.rtp.remb = true;
  // TODO(pbos): This protection is against setting the same local ssrc as
  // remote which is not permitted by the lower-level API. RTCP requires a
  // corresponding sender SSRC. Figure out what to do when we don't have
  // (receive-only) or know a good local SSRC.
  if (config.rtp.remote_ssrc == config.rtp.local_ssrc) {
    if (config.rtp.local_ssrc != kDefaultRtcpReceiverReportSsrc) {
      config.rtp.local_ssrc = kDefaultRtcpReceiverReportSsrc;
    } else {
      config.rtp.local_ssrc = kDefaultRtcpReceiverReportSsrc + 1;
    }
  }
  bool default_renderer_used = false;
  for (std::map<uint32, WebRtcVideoRenderer*>::iterator it = renderers_.begin();
       it != renderers_.end();
       ++it) {
    if (it->second->GetRenderer() == default_renderer_) {
      default_renderer_used = true;
      break;
    }
  }

  assert(renderers_[ssrc] == NULL);
  renderers_[ssrc] = new WebRtcVideoRenderer();
  if (!default_renderer_used) {
    renderers_[ssrc]->SetRenderer(default_renderer_);
  }
  config.renderer = renderers_[ssrc];

  {
    // TODO(pbos): Base receive codecs off recv_codecs_ and set up using a
    // DecoderFactory similar to send side. Pending webrtc:2854.
    // Also set up default codecs if there's nothing in recv_codecs_.
    webrtc::VideoCodec codec;
    memset(&codec, 0, sizeof(codec));

    codec.plType = kDefaultVideoCodecPref.payload_type;
    strcpy(codec.plName, kDefaultVideoCodecPref.name);
    codec.codecType = webrtc::kVideoCodecVP8;
    codec.codecSpecific.VP8.resilience = webrtc::kResilientStream;
    codec.codecSpecific.VP8.numberOfTemporalLayers = 1;
    codec.codecSpecific.VP8.denoisingOn = true;
    codec.codecSpecific.VP8.errorConcealmentOn = false;
    codec.codecSpecific.VP8.automaticResizeOn = false;
    codec.codecSpecific.VP8.frameDroppingOn = true;
    codec.codecSpecific.VP8.keyFrameInterval = 3000;
    // Bitrates don't matter and are ignored for the receiver. This is put in to
    // have the current underlying implementation accept the VideoCodec.
    codec.minBitrate = codec.startBitrate = codec.maxBitrate = 300;
    config.codecs.push_back(codec);
    for (size_t i = 0; i < recv_codecs_.size(); ++i) {
      if (recv_codecs_[i].codec.id == codec.plType) {
        config.rtp.fec = recv_codecs_[i].fec;
        if (recv_codecs_[i].rtx_payload_type != -1 && rtx_ssrc != 0) {
          config.rtp.rtx[codec.plType].ssrc = rtx_ssrc;
          config.rtp.rtx[codec.plType].payload_type =
              recv_codecs_[i].rtx_payload_type;
        }
        break;
      }
    }
  }

  webrtc::VideoReceiveStream* receive_stream =
      call_->CreateVideoReceiveStream(config);
  assert(receive_stream != NULL);

  receive_streams_[ssrc] = receive_stream;
  receive_stream->Start();

  return true;
}

bool WebRtcVideoChannel2::RemoveRecvStream(uint32 ssrc) {
  LOG(LS_INFO) << "RemoveRecvStream: " << ssrc;
  if (ssrc == 0) {
    ssrc = default_recv_ssrc_;
  }

  std::map<uint32, webrtc::VideoReceiveStream*>::iterator stream =
      receive_streams_.find(ssrc);
  if (stream == receive_streams_.end()) {
    LOG(LS_ERROR) << "Stream not found for ssrc: " << ssrc;
    return false;
  }
  call_->DestroyVideoReceiveStream(stream->second);
  receive_streams_.erase(stream);

  std::map<uint32, WebRtcVideoRenderer*>::iterator renderer =
      renderers_.find(ssrc);
  assert(renderer != renderers_.end());
  delete renderer->second;
  renderers_.erase(renderer);

  if (ssrc == default_recv_ssrc_) {
    default_recv_ssrc_ = 0;
  }

  return true;
}

bool WebRtcVideoChannel2::SetRenderer(uint32 ssrc, VideoRenderer* renderer) {
  LOG(LS_INFO) << "SetRenderer: ssrc:" << ssrc << " "
               << (renderer ? "(ptr)" : "NULL");
  bool is_default_ssrc = false;
  if (ssrc == 0) {
    is_default_ssrc = true;
    ssrc = default_recv_ssrc_;
    default_renderer_ = renderer;
  }

  std::map<uint32, WebRtcVideoRenderer*>::iterator it = renderers_.find(ssrc);
  if (it == renderers_.end()) {
    return is_default_ssrc;
  }

  it->second->SetRenderer(renderer);
  return true;
}

bool WebRtcVideoChannel2::GetRenderer(uint32 ssrc, VideoRenderer** renderer) {
  if (ssrc == 0) {
    if (default_renderer_ == NULL) {
      return false;
    }
    *renderer = default_renderer_;
    return true;
  }

  std::map<uint32, WebRtcVideoRenderer*>::iterator it = renderers_.find(ssrc);
  if (it == renderers_.end()) {
    return false;
  }
  *renderer = it->second->GetRenderer();
  return true;
}

bool WebRtcVideoChannel2::GetStats(const StatsOptions& options,
                                   VideoMediaInfo* info) {
  // TODO(pbos): Implement.
  return true;
}

bool WebRtcVideoChannel2::SetCapturer(uint32 ssrc, VideoCapturer* capturer) {
  LOG(LS_INFO) << "SetCapturer: " << ssrc << " -> "
               << (capturer != NULL ? "(capturer)" : "NULL");
  assert(ssrc != 0);
  if (send_streams_.find(ssrc) == send_streams_.end()) {
    LOG(LS_ERROR) << "No sending stream on ssrc " << ssrc;
    return false;
  }
  return send_streams_[ssrc]->SetCapturer(capturer);
}

bool WebRtcVideoChannel2::SendIntraFrame() {
  // TODO(pbos): Implement.
  LOG(LS_VERBOSE) << "SendIntraFrame().";
  return true;
}

bool WebRtcVideoChannel2::RequestIntraFrame() {
  // TODO(pbos): Implement.
  LOG(LS_VERBOSE) << "RequestIntraFrame().";
  return true;
}

void WebRtcVideoChannel2::OnPacketReceived(
    talk_base::Buffer* packet,
    const talk_base::PacketTime& packet_time) {
  const webrtc::PacketReceiver::DeliveryStatus delivery_result =
      call_->Receiver()->DeliverPacket(
          reinterpret_cast<const uint8_t*>(packet->data()), packet->length());
  switch (delivery_result) {
    case webrtc::PacketReceiver::DELIVERY_OK:
      return;
    case webrtc::PacketReceiver::DELIVERY_PACKET_ERROR:
      return;
    case webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC:
      break;
  }

  uint32 ssrc = 0;
  if (default_recv_ssrc_ != 0) {  // Already one default stream.
    LOG(LS_WARNING) << "Unknown SSRC, but default receive stream already set.";
    return;
  }

  if (!GetRtpSsrc(packet->data(), packet->length(), &ssrc)) {
    return;
  }

  StreamParams sp;
  sp.ssrcs.push_back(ssrc);
  AddRecvStream(sp);

  if (call_->Receiver()->DeliverPacket(
          reinterpret_cast<const uint8_t*>(packet->data()),
          packet->length()) != webrtc::PacketReceiver::DELIVERY_OK) {
    LOG(LS_WARNING) << "Failed to deliver RTP packet.";
    return;
  }
}

void WebRtcVideoChannel2::OnRtcpReceived(
    talk_base::Buffer* packet,
    const talk_base::PacketTime& packet_time) {
  if (call_->Receiver()->DeliverPacket(
          reinterpret_cast<const uint8_t*>(packet->data()),
          packet->length()) != webrtc::PacketReceiver::DELIVERY_OK) {
    LOG(LS_WARNING) << "Failed to deliver RTCP packet.";
  }
}

void WebRtcVideoChannel2::OnReadyToSend(bool ready) {
  LOG(LS_VERBOSE) << "OnReadyToSend: " << (ready ? "Ready." : "Not ready.");
}

bool WebRtcVideoChannel2::MuteStream(uint32 ssrc, bool mute) {
  LOG(LS_VERBOSE) << "MuteStream: " << ssrc << " -> "
                  << (mute ? "mute" : "unmute");
  assert(ssrc != 0);
  if (send_streams_.find(ssrc) == send_streams_.end()) {
    LOG(LS_ERROR) << "No sending stream on ssrc " << ssrc;
    return false;
  }
  return send_streams_[ssrc]->MuteStream(mute);
}

bool WebRtcVideoChannel2::SetRecvRtpHeaderExtensions(
    const std::vector<RtpHeaderExtension>& extensions) {
  // TODO(pbos): Implement.
  LOG(LS_VERBOSE) << "SetRecvRtpHeaderExtensions()";
  return true;
}

bool WebRtcVideoChannel2::SetSendRtpHeaderExtensions(
    const std::vector<RtpHeaderExtension>& extensions) {
  // TODO(pbos): Implement.
  LOG(LS_VERBOSE) << "SetSendRtpHeaderExtensions()";
  return true;
}

bool WebRtcVideoChannel2::SetStartSendBandwidth(int bps) {
  // TODO(pbos): Implement.
  LOG(LS_VERBOSE) << "SetStartSendBandwidth: " << bps;
  return true;
}

bool WebRtcVideoChannel2::SetMaxSendBandwidth(int bps) {
  // TODO(pbos): Implement.
  LOG(LS_VERBOSE) << "SetMaxSendBandwidth: " << bps;
  return true;
}

bool WebRtcVideoChannel2::SetOptions(const VideoOptions& options) {
  LOG(LS_VERBOSE) << "SetOptions: " << options.ToString();
  options_.SetAll(options);
  return true;
}

void WebRtcVideoChannel2::SetInterface(NetworkInterface* iface) {
  MediaChannel::SetInterface(iface);
  // Set the RTP recv/send buffer to a bigger size
  MediaChannel::SetOption(NetworkInterface::ST_RTP,
                          talk_base::Socket::OPT_RCVBUF,
                          kVideoRtpBufferSize);

  // TODO(sriniv): Remove or re-enable this.
  // As part of b/8030474, send-buffer size is now controlled through
  // portallocator flags.
  // network_interface_->SetOption(NetworkInterface::ST_RTP,
  //                               talk_base::Socket::OPT_SNDBUF,
  //                               kVideoRtpBufferSize);
}

void WebRtcVideoChannel2::UpdateAspectRatio(int ratio_w, int ratio_h) {
  // TODO(pbos): Implement.
}

void WebRtcVideoChannel2::OnMessage(talk_base::Message* msg) {
  // Ignored.
}

bool WebRtcVideoChannel2::SendRtp(const uint8_t* data, size_t len) {
  talk_base::Buffer packet(data, len, kMaxRtpPacketLen);
  return MediaChannel::SendPacket(&packet);
}

bool WebRtcVideoChannel2::SendRtcp(const uint8_t* data, size_t len) {
  talk_base::Buffer packet(data, len, kMaxRtpPacketLen);
  return MediaChannel::SendRtcp(&packet);
}

void WebRtcVideoChannel2::StartAllSendStreams() {
  for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
           send_streams_.begin();
       it != send_streams_.end();
       ++it) {
    it->second->Start();
  }
}

void WebRtcVideoChannel2::StopAllSendStreams() {
  for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
           send_streams_.begin();
       it != send_streams_.end();
       ++it) {
    it->second->Stop();
  }
}

void WebRtcVideoChannel2::SetCodecForAllSendStreams(
    const WebRtcVideoChannel2::VideoCodecSettings& codec) {
  for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
           send_streams_.begin();
       it != send_streams_.end();
       ++it) {
    assert(it->second != NULL);
    it->second->SetCodec(options_, codec);
  }
}

WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
    webrtc::Call* call,
    const webrtc::VideoSendStream::Config& config,
    WebRtcVideoEncoderFactory2* encoder_factory)
    : call_(call),
      config_(config),
      encoder_factory_(encoder_factory),
      capturer_(NULL),
      stream_(NULL),
      sending_(false),
      muted_(false),
      format_(static_cast<int>(config.encoder_settings.streams.back().width),
              static_cast<int>(config.encoder_settings.streams.back().height),
              VideoFormat::FpsToInterval(
                  config.encoder_settings.streams.back().max_framerate),
              FOURCC_I420) {
  RecreateWebRtcStream();
}

WebRtcVideoChannel2::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
  DisconnectCapturer();
  call_->DestroyVideoSendStream(stream_);
  delete config_.encoder_settings.encoder;
}

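// Fills an already allocated frame with black: in I420, Y = 16 with
// U = V = 128 represents black.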
static void SetWebRtcFrameToBlack(webrtc::I420VideoFrame* video_frame) {
  assert(video_frame != NULL);
  memset(video_frame->buffer(webrtc::kYPlane),
         16,
         video_frame->allocated_size(webrtc::kYPlane));
  memset(video_frame->buffer(webrtc::kUPlane),
         128,
         video_frame->allocated_size(webrtc::kUPlane));
  memset(video_frame->buffer(webrtc::kVPlane),
         128,
         video_frame->allocated_size(webrtc::kVPlane));
}

static void CreateBlackFrame(webrtc::I420VideoFrame* video_frame,
                             int width,
                             int height) {
  video_frame->CreateEmptyFrame(
      width, height, width, (width + 1) / 2, (width + 1) / 2);
  SetWebRtcFrameToBlack(video_frame);
}

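// Copies the planes of a cricket::VideoFrame into a webrtc::I420VideoFrame.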
static void ConvertToI420VideoFrame(const VideoFrame& frame,
                                    webrtc::I420VideoFrame* i420_frame) {
  i420_frame->CreateFrame(
      static_cast<int>(frame.GetYPitch() * frame.GetHeight()),
      frame.GetYPlane(),
      static_cast<int>(frame.GetUPitch() * ((frame.GetHeight() + 1) / 2)),
      frame.GetUPlane(),
      static_cast<int>(frame.GetVPitch() * ((frame.GetHeight() + 1) / 2)),
      frame.GetVPlane(),
      static_cast<int>(frame.GetWidth()),
      static_cast<int>(frame.GetHeight()),
      static_cast<int>(frame.GetYPitch()),
      static_cast<int>(frame.GetUPitch()),
      static_cast<int>(frame.GetVPitch()));
}

void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
    VideoCapturer* capturer,
    const VideoFrame* frame) {
  LOG(LS_VERBOSE) << "InputFrame: " << frame->GetWidth() << "x"
                  << frame->GetHeight();
  bool is_screencast = capturer->IsScreencast();
  // Lock before copying; this can be called concurrently when swapping the
  // input source.
| 1403 | talk_base::CritScope frame_cs(&frame_lock_); |
| 1404 | if (!muted_) { |
| 1405 | ConvertToI420VideoFrame(*frame, &video_frame_); |
| 1406 | } else { |
| 1407 | // Create a tiny black frame to transmit instead. |
| 1408 | CreateBlackFrame(&video_frame_, 1, 1); |
| 1409 | is_screencast = false; |
| 1410 | } |
| 1411 | talk_base::CritScope cs(&lock_); |
| 1412 | if (format_.width == 0) { // Dropping frames. |
| 1413 | assert(format_.height == 0); |
| 1414 | LOG(LS_VERBOSE) << "VideoFormat 0x0 set, Dropping frame."; |
| 1415 | return; |
| 1416 | } |
| 1417 | // Reconfigure codec if necessary. |
| 1418 | if (is_screencast) { |
| 1419 | SetDimensions(video_frame_.width(), video_frame_.height()); |
| 1420 | } |
| 1421 | LOG(LS_VERBOSE) << "SwapFrame: " << video_frame_.width() << "x" |
| 1422 | << video_frame_.height() << " -> (codec) " |
| 1423 | << config_.encoder_settings.streams.back().width << "x" |
| 1424 | << config_.encoder_settings.streams.back().height; |
| 1425 | stream_->Input()->SwapFrame(&video_frame_); |
| 1426 | } |
| 1427 | |
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
    VideoCapturer* capturer) {
  if (!DisconnectCapturer() && capturer == NULL) {
    return false;
  }

  {
    talk_base::CritScope cs(&lock_);

    if (capturer == NULL) {
      LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
      webrtc::I420VideoFrame black_frame;

      int width = format_.width;
      int height = format_.height;
      int half_width = (width + 1) / 2;
      black_frame.CreateEmptyFrame(
          width, height, width, half_width, half_width);
      SetWebRtcFrameToBlack(&black_frame);
      SetDimensions(width, height);
      stream_->Input()->SwapFrame(&black_frame);

      capturer_ = NULL;
      return true;
    }

    capturer_ = capturer;
  }
  // Lock cannot be held while connecting the capturer to prevent lock-order
  // violations.
  capturer->SignalVideoFrame.connect(this, &WebRtcVideoSendStream::InputFrame);
  return true;
}

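// Applies a new capture format. A 0x0 format means "drop all captured
// frames"; any other format updates the maximum framerate and the encoder
// dimensions.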
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoFormat(
    const VideoFormat& format) {
  if ((format.width == 0 || format.height == 0) &&
      format.width != format.height) {
    LOG(LS_ERROR) << "Can't set VideoFormat: only one of width and height is "
                     "zero (0x0 is valid and drops all frames).";
    return false;
  }

  talk_base::CritScope cs(&lock_);
  if (format.width == 0 && format.height == 0) {
    LOG(LS_INFO)
        << "0x0 resolution selected. Captured frames will be dropped for ssrc: "
        << config_.rtp.ssrcs[0] << ".";
  } else {
    // TODO(pbos): Fix me, this only affects the last stream!
    config_.encoder_settings.streams.back().max_framerate =
        VideoFormat::IntervalToFps(format.interval);
    SetDimensions(format.width, format.height);
  }

  format_ = format;
  return true;
}

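// Toggles sending black frames instead of captured content. Returns true if
// the mute state actually changed.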
bool WebRtcVideoChannel2::WebRtcVideoSendStream::MuteStream(bool mute) {
  talk_base::CritScope cs(&lock_);
  bool was_muted = muted_;
  muted_ = mute;
  return was_muted != mute;
}

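// Disconnects the current capturer, if any. Returns false when no capturer
// was connected.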
bool WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectCapturer() {
  talk_base::CritScope cs(&lock_);
  if (capturer_ == NULL) {
    return false;
  }
  capturer_->SignalVideoFrame.disconnect(this);
  capturer_ = NULL;
  return true;
}

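// Reconfigures the stream for a new codec: rebuilds the encoder settings via
// the encoder factory, updates the FEC configuration and internal format, and
// recreates the underlying webrtc::VideoSendStream before deleting the old
// encoder.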
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetCodec(
    const VideoOptions& options,
    const VideoCodecSettings& codec) {
  talk_base::CritScope cs(&lock_);
  webrtc::VideoEncoder* old_encoder = config_.encoder_settings.encoder;
  if (!encoder_factory_->CreateEncoderSettings(
          &config_.encoder_settings,
          options,
          codec.codec,
          config_.encoder_settings.streams.size())) {
    LOG(LS_ERROR) << "Could not create encoder settings for: '"
                  << codec.codec.name
                  << "'. This is a bug, as SetCodec should only receive "
                     "codecs which the encoder factory claims to support.";
    return;
  }
  format_ = VideoFormat(codec.codec.width,
                        codec.codec.height,
                        VideoFormat::FpsToInterval(30),
                        FOURCC_I420);
  config_.rtp.fec = codec.fec;
  // TODO(pbos): Should changing RTX payload type be allowed?
  RecreateWebRtcStream();
  delete old_encoder;
}

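// Updates the encoder resolution and reconfigures the send stream if the
// dimensions changed. Only the last entry in encoder_settings.streams is
// updated (see the TODO below).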
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetDimensions(int width,
                                                               int height) {
  assert(!config_.encoder_settings.streams.empty());
  LOG(LS_VERBOSE) << "SetDimensions: " << width << "x" << height;
  if (config_.encoder_settings.streams.back().width == width &&
      config_.encoder_settings.streams.back().height == height) {
    return;
  }

  // TODO(pbos): Fix me, this only affects the last stream!
  config_.encoder_settings.streams.back().width = width;
  config_.encoder_settings.streams.back().height = height;
  // TODO(pbos): Last parameter shouldn't always be NULL?
  if (!stream_->ReconfigureVideoEncoder(config_.encoder_settings.streams,
                                        NULL)) {
    LOG(LS_WARNING) << "Failed to reconfigure video encoder for dimensions: "
                    << width << "x" << height;
    return;
  }
}

void WebRtcVideoChannel2::WebRtcVideoSendStream::Start() {
  talk_base::CritScope cs(&lock_);
  stream_->Start();
  sending_ = true;
}

void WebRtcVideoChannel2::WebRtcVideoSendStream::Stop() {
  talk_base::CritScope cs(&lock_);
  stream_->Stop();
  sending_ = false;
}

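// Destroys and recreates the underlying webrtc::VideoSendStream from the
// current config, restarting it if this stream was sending.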
void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
  if (stream_ != NULL) {
    call_->DestroyVideoSendStream(stream_);
  }
  stream_ = call_->CreateVideoSendStream(config_);
  if (sending_) {
    stream_->Start();
  }
}

WebRtcVideoChannel2::VideoCodecSettings::VideoCodecSettings()
    : rtx_payload_type(-1) {}

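// Splits a flat codec list into video codecs annotated with their associated
// FEC (RED/ULPFEC) and RTX payload types. Returns an empty vector on invalid
// input, e.g. a duplicate payload type or an RTX codec without an associated
// payload type.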
std::vector<WebRtcVideoChannel2::VideoCodecSettings>
WebRtcVideoChannel2::MapCodecs(const std::vector<VideoCodec>& codecs) {
  assert(!codecs.empty());

  std::vector<VideoCodecSettings> video_codecs;
  std::map<int, bool> payload_used;
  std::map<int, int> rtx_mapping;  // video payload type -> rtx payload type.

  webrtc::FecConfig fec_settings;

  for (size_t i = 0; i < codecs.size(); ++i) {
    const VideoCodec& in_codec = codecs[i];
    int payload_type = in_codec.id;

    if (payload_used[payload_type]) {
      LOG(LS_ERROR) << "Payload type already registered: "
                    << in_codec.ToString();
      return std::vector<VideoCodecSettings>();
    }
    payload_used[payload_type] = true;

    switch (in_codec.GetCodecType()) {
      case VideoCodec::CODEC_RED: {
        // RED payload type; there should be no duplicates.
        assert(fec_settings.red_payload_type == -1);
        fec_settings.red_payload_type = in_codec.id;
        continue;
      }

      case VideoCodec::CODEC_ULPFEC: {
        // ULPFEC payload type; there should be no duplicates.
        assert(fec_settings.ulpfec_payload_type == -1);
        fec_settings.ulpfec_payload_type = in_codec.id;
        continue;
      }

      case VideoCodec::CODEC_RTX: {
        int associated_payload_type;
        if (!in_codec.GetParam(kCodecParamAssociatedPayloadType,
                               &associated_payload_type)) {
          LOG(LS_ERROR) << "RTX codec without associated payload type: "
                        << in_codec.ToString();
          return std::vector<VideoCodecSettings>();
        }
        rtx_mapping[associated_payload_type] = in_codec.id;
        continue;
      }

      case VideoCodec::CODEC_VIDEO:
        break;
    }

    video_codecs.push_back(VideoCodecSettings());
    video_codecs.back().codec = in_codec;
  }

  // At least one of these codecs should have been a video codec. Passing only
  // FEC parameters into this code is a logic error.
  assert(!video_codecs.empty());

  // TODO(pbos): Add tests verifying that RTX codecs are not mapped to bogus
  // payload types (this is currently not verified).
  for (size_t i = 0; i < video_codecs.size(); ++i) {
    video_codecs[i].fec = fec_settings;
    if (rtx_mapping[video_codecs[i].codec.id] != 0) {
      video_codecs[i].rtx_payload_type = rtx_mapping[video_codecs[i].codec.id];
    }
  }

  return video_codecs;
}

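// Returns the subset of the mapped codecs that the encoder factory reports as
// supported.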
std::vector<WebRtcVideoChannel2::VideoCodecSettings>
WebRtcVideoChannel2::FilterSupportedCodecs(
    const std::vector<WebRtcVideoChannel2::VideoCodecSettings>& mapped_codecs) {
  std::vector<VideoCodecSettings> supported_codecs;
  for (size_t i = 0; i < mapped_codecs.size(); ++i) {
    if (encoder_factory_->SupportsCodec(mapped_codecs[i].codec)) {
      supported_codecs.push_back(mapped_codecs[i]);
    }
  }
  return supported_codecs;
}

}  // namespace cricket

#endif  // HAVE_WEBRTC_VIDEO