niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 1 | /* |
mflodman@webrtc.org | 0e703f4 | 2012-03-06 12:02:20 +0000 | [diff] [blame] | 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license |
| 5 | * that can be found in the LICENSE file in the root of the source |
| 6 | * tree. An additional intellectual property rights grant can be found |
| 7 | * in the file PATENTS. All contributing project authors may |
| 8 | * be found in the AUTHORS file in the root of the source tree. |
| 9 | */ |
| 10 | |
andrew@webrtc.org | 9841d92 | 2012-10-31 05:22:11 +0000 | [diff] [blame] | 11 | #include "webrtc/modules/video_render//incoming_video_stream.h" |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 12 | |
pbos@webrtc.org | 12dc1a3 | 2013-08-05 16:22:53 +0000 | [diff] [blame] | 13 | #include <assert.h> |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 14 | |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 15 | #if defined(_WIN32) |
| 16 | #include <windows.h> |
| 17 | #elif defined(WEBRTC_LINUX) |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 18 | #include <sys/time.h> |
pbos@webrtc.org | 12dc1a3 | 2013-08-05 16:22:53 +0000 | [diff] [blame] | 19 | #include <time.h> |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 20 | #else |
| 21 | #include <sys/time.h> |
| 22 | #endif |
| 23 | |
pbos@webrtc.org | 5aa3f1b | 2013-07-12 08:12:08 +0000 | [diff] [blame] | 24 | #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
andrew@webrtc.org | 9841d92 | 2012-10-31 05:22:11 +0000 | [diff] [blame] | 25 | #include "webrtc/modules/video_render//video_render_frames.h" |
pbos@webrtc.org | 5aa3f1b | 2013-07-12 08:12:08 +0000 | [diff] [blame] | 26 | #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" |
| 27 | #include "webrtc/system_wrappers/interface/event_wrapper.h" |
pbos@webrtc.org | 5aa3f1b | 2013-07-12 08:12:08 +0000 | [diff] [blame] | 28 | #include "webrtc/system_wrappers/interface/thread_wrapper.h" |
| 29 | #include "webrtc/system_wrappers/interface/tick_util.h" |
| 30 | #include "webrtc/system_wrappers/interface/trace.h" |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 31 | |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 32 | namespace webrtc { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 33 | |
// Constructs an incoming stream for |stream_id|. The critical sections, the
// delivery event and the frame buffer are heap-allocated here and held as
// references; the destructor is responsible for deleting them. The render
// thread itself is not created until Start() is called.
IncomingVideoStream::IncomingVideoStream(const int32_t module_id,
                                         const uint32_t stream_id)
    : module_id_(module_id),
      stream_id_(stream_id),
      stream_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
      thread_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
      buffer_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
      incoming_render_thread_(),  // Created lazily in Start().
      deliver_buffer_event_(*EventWrapper::Create()),
      running_(false),
      external_callback_(NULL),
      render_callback_(NULL),
      render_buffers_(*(new VideoRenderFrames)),
      callbackVideoType_(kVideoI420),
      callbackWidth_(0),
      callbackHeight_(0),
      incoming_rate_(0),
      last_rate_calculation_time_ms_(0),
      num_frames_since_last_calculation_(0),
      last_rendered_frame_(),
      temp_frame_(),
      start_image_(),
      timeout_image_(),
      timeout_time_(),
      mirror_frames_enabled_(false),
      mirroring_(),
      transformed_video_frame_() {
  WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
               "%s created for stream %d", __FUNCTION__, stream_id);
}
| 64 | |
// Stops the render thread (if running) and releases the heap objects that the
// constructor allocated and stored as references.
IncomingVideoStream::~IncomingVideoStream() {
  WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
               "%s deleted for stream %d", __FUNCTION__, stream_id_);

  // Stop() must run before the critical sections below are destroyed, since
  // it takes those locks while shutting down the thread.
  Stop();

  // incoming_render_thread_ - Delete in stop
  delete &render_buffers_;
  delete &stream_critsect_;
  delete &buffer_critsect_;
  delete &thread_critsect_;
  delete &deliver_buffer_event_;
}
| 78 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 79 | int32_t IncomingVideoStream::ChangeModuleId(const int32_t id) { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 80 | CriticalSectionScoped cs(&stream_critsect_); |
| 81 | module_id_ = id; |
| 82 | return 0; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 83 | } |
| 84 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 85 | VideoRenderCallback* IncomingVideoStream::ModuleCallback() { |
| 86 | CriticalSectionScoped cs(&stream_critsect_); |
| 87 | return this; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 88 | } |
| 89 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 90 | int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id, |
| 91 | I420VideoFrame& video_frame) { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 92 | CriticalSectionScoped csS(&stream_critsect_); |
| 93 | WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, |
| 94 | "%s for stream %d, render time: %u", __FUNCTION__, stream_id_, |
mikhal@webrtc.org | 9fedff7 | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 95 | video_frame.render_time_ms()); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 96 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 97 | if (!running_) { |
| 98 | WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, |
| 99 | "%s: Not running", __FUNCTION__); |
| 100 | return -1; |
| 101 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 102 | |
wu@webrtc.org | 9dba525 | 2013-08-05 20:36:57 +0000 | [diff] [blame] | 103 | // Mirroring is not supported if the frame is backed by a texture. |
| 104 | if (true == mirror_frames_enabled_ && video_frame.native_handle() == NULL) { |
mikhal@webrtc.org | 9fedff7 | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 105 | transformed_video_frame_.CreateEmptyFrame(video_frame.width(), |
| 106 | video_frame.height(), |
| 107 | video_frame.stride(kYPlane), |
| 108 | video_frame.stride(kUPlane), |
| 109 | video_frame.stride(kVPlane)); |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 110 | if (mirroring_.mirror_x_axis) { |
mikhal@webrtc.org | 2338131 | 2012-09-27 15:36:15 +0000 | [diff] [blame] | 111 | MirrorI420UpDown(&video_frame, |
| 112 | &transformed_video_frame_); |
mikhal@webrtc.org | 9fedff7 | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 113 | video_frame.SwapFrame(&transformed_video_frame_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 114 | } |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 115 | if (mirroring_.mirror_y_axis) { |
mikhal@webrtc.org | 2338131 | 2012-09-27 15:36:15 +0000 | [diff] [blame] | 116 | MirrorI420LeftRight(&video_frame, |
| 117 | &transformed_video_frame_); |
mikhal@webrtc.org | 9fedff7 | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 118 | video_frame.SwapFrame(&transformed_video_frame_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 119 | } |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 120 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 121 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 122 | // Rate statistics. |
| 123 | num_frames_since_last_calculation_++; |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 124 | int64_t now_ms = TickTime::MillisecondTimestamp(); |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 125 | if (now_ms >= last_rate_calculation_time_ms_ + KFrameRatePeriodMs) { |
| 126 | incoming_rate_ = |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 127 | static_cast<uint32_t>(1000 * num_frames_since_last_calculation_ / |
| 128 | (now_ms - last_rate_calculation_time_ms_)); |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 129 | num_frames_since_last_calculation_ = 0; |
| 130 | last_rate_calculation_time_ms_ = now_ms; |
| 131 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 132 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 133 | // Insert frame. |
| 134 | CriticalSectionScoped csB(&buffer_critsect_); |
| 135 | if (render_buffers_.AddFrame(&video_frame) == 1) |
| 136 | deliver_buffer_event_.Set(); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 137 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 138 | return 0; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 139 | } |
| 140 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 141 | int32_t IncomingVideoStream::SetStartImage( |
mikhal@webrtc.org | 9fedff7 | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 142 | const I420VideoFrame& video_frame) { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 143 | CriticalSectionScoped csS(&thread_critsect_); |
| 144 | return start_image_.CopyFrame(video_frame); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 145 | } |
| 146 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 147 | int32_t IncomingVideoStream::SetTimeoutImage( |
| 148 | const I420VideoFrame& video_frame, const uint32_t timeout) { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 149 | CriticalSectionScoped csS(&thread_critsect_); |
| 150 | timeout_time_ = timeout; |
| 151 | return timeout_image_.CopyFrame(video_frame); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 152 | } |
| 153 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 154 | int32_t IncomingVideoStream::SetRenderCallback( |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 155 | VideoRenderCallback* render_callback) { |
| 156 | CriticalSectionScoped cs(&stream_critsect_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 157 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 158 | WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_, |
| 159 | "%s(%x) for stream %d", __FUNCTION__, render_callback, |
| 160 | stream_id_); |
| 161 | render_callback_ = render_callback; |
| 162 | return 0; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 163 | } |
| 164 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 165 | int32_t IncomingVideoStream::EnableMirroring(const bool enable, |
| 166 | const bool mirror_x_axis, |
| 167 | const bool mirror_y_axis) { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 168 | CriticalSectionScoped cs(&stream_critsect_); |
| 169 | mirror_frames_enabled_ = enable; |
| 170 | mirroring_.mirror_x_axis = mirror_x_axis; |
| 171 | mirroring_.mirror_y_axis = mirror_y_axis; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 172 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 173 | return 0; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 174 | } |
| 175 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 176 | int32_t IncomingVideoStream::SetExpectedRenderDelay( |
| 177 | int32_t delay_ms) { |
mflodman@webrtc.org | f4f2145 | 2012-09-28 11:27:35 +0000 | [diff] [blame] | 178 | CriticalSectionScoped csS(&stream_critsect_); |
| 179 | if (running_) { |
| 180 | WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_, |
| 181 | "%s(%d) for stream %d", __FUNCTION__, delay_ms, stream_id_); |
| 182 | return -1; |
| 183 | } |
mflodman@webrtc.org | aeb37d3 | 2012-10-11 16:31:00 +0000 | [diff] [blame] | 184 | CriticalSectionScoped cs(&buffer_critsect_); |
mflodman@webrtc.org | f4f2145 | 2012-09-28 11:27:35 +0000 | [diff] [blame] | 185 | return render_buffers_.SetRenderDelay(delay_ms); |
| 186 | } |
| 187 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 188 | int32_t IncomingVideoStream::SetExternalCallback( |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 189 | VideoRenderCallback* external_callback) { |
| 190 | CriticalSectionScoped cs(&stream_critsect_); |
| 191 | WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_, |
| 192 | "%s(%x) for stream %d", __FUNCTION__, external_callback, |
| 193 | stream_id_); |
| 194 | external_callback_ = external_callback; |
| 195 | callbackVideoType_ = kVideoI420; |
| 196 | callbackWidth_ = 0; |
| 197 | callbackHeight_ = 0; |
| 198 | return 0; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 199 | } |
| 200 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 201 | int32_t IncomingVideoStream::Start() { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 202 | CriticalSectionScoped csS(&stream_critsect_); |
| 203 | WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_, |
| 204 | "%s for stream %d", __FUNCTION__, stream_id_); |
| 205 | if (running_) { |
| 206 | WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_, |
| 207 | "%s: Already running", __FUNCTION__); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 208 | return 0; |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 209 | } |
| 210 | |
| 211 | CriticalSectionScoped csT(&thread_critsect_); |
| 212 | assert(incoming_render_thread_ == NULL); |
| 213 | |
| 214 | incoming_render_thread_ = ThreadWrapper::CreateThread( |
| 215 | IncomingVideoStreamThreadFun, this, kRealtimePriority, |
| 216 | "IncomingVideoStreamThread"); |
| 217 | if (!incoming_render_thread_) { |
| 218 | WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_, |
| 219 | "%s: No thread", __FUNCTION__); |
| 220 | return -1; |
| 221 | } |
| 222 | |
| 223 | unsigned int t_id = 0; |
| 224 | if (incoming_render_thread_->Start(t_id)) { |
| 225 | WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_, |
| 226 | "%s: thread started: %u", __FUNCTION__, t_id); |
| 227 | } else { |
| 228 | WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_, |
| 229 | "%s: Could not start send thread", __FUNCTION__); |
| 230 | return -1; |
| 231 | } |
| 232 | deliver_buffer_event_.StartTimer(false, KEventStartupTimeMS); |
| 233 | |
| 234 | running_ = true; |
| 235 | return 0; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 236 | } |
| 237 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 238 | int32_t IncomingVideoStream::Stop() { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 239 | CriticalSectionScoped cs_stream(&stream_critsect_); |
| 240 | WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_, |
| 241 | "%s for stream %d", __FUNCTION__, stream_id_); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 242 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 243 | if (!running_) { |
| 244 | WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_, |
| 245 | "%s: Not running", __FUNCTION__); |
| 246 | return 0; |
| 247 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 248 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 249 | thread_critsect_.Enter(); |
| 250 | if (incoming_render_thread_) { |
| 251 | ThreadWrapper* thread = incoming_render_thread_; |
| 252 | incoming_render_thread_ = NULL; |
| 253 | thread->SetNotAlive(); |
| 254 | #ifndef WIN32_ |
| 255 | deliver_buffer_event_.StopTimer(); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 256 | #endif |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 257 | thread_critsect_.Leave(); |
| 258 | if (thread->Stop()) { |
| 259 | delete thread; |
| 260 | } else { |
| 261 | assert(false); |
| 262 | WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_, |
| 263 | "%s: Not able to stop thread, leaking", __FUNCTION__); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 264 | } |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 265 | } else { |
| 266 | thread_critsect_.Leave(); |
| 267 | } |
| 268 | running_ = false; |
| 269 | return 0; |
| 270 | } |
| 271 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 272 | int32_t IncomingVideoStream::Reset() { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 273 | CriticalSectionScoped cs_stream(&stream_critsect_); |
| 274 | CriticalSectionScoped cs_buffer(&buffer_critsect_); |
| 275 | render_buffers_.ReleaseAllFrames(); |
| 276 | return 0; |
| 277 | } |
| 278 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 279 | uint32_t IncomingVideoStream::StreamId() const { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 280 | CriticalSectionScoped cs_stream(&stream_critsect_); |
| 281 | return stream_id_; |
| 282 | } |
| 283 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 284 | uint32_t IncomingVideoStream::IncomingRate() const { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 285 | CriticalSectionScoped cs(&stream_critsect_); |
| 286 | return incoming_rate_; |
| 287 | } |
| 288 | |
| 289 | bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj) { |
| 290 | return static_cast<IncomingVideoStream*>(obj)->IncomingVideoStreamProcess(); |
| 291 | } |
| 292 | |
// Render-thread loop body. Waits for the frame-delivery event, pops the next
// due frame from the buffer and hands it to the registered callback (external
// callback takes precedence over the internal one). Returns false only when
// the stream is terminating; true keeps the thread wrapper calling back.
//
// Locking: thread_critsect_ is held from just after the event wait until
// right before the frame is recycled, and is released manually on every exit
// path — keep Enter/Leave pairs balanced when modifying this function.
bool IncomingVideoStream::IncomingVideoStreamProcess() {
  if (kEventError != deliver_buffer_event_.Wait(KEventMaxWaitTimeMs)) {
    thread_critsect_.Enter();
    if (incoming_render_thread_ == NULL) {
      // Terminating: Stop() clears incoming_render_thread_ as the signal.
      thread_critsect_.Leave();
      return false;
    }

    I420VideoFrame* frame_to_render = NULL;

    // Get a new frame to render and the time for the frame after this one.
    buffer_critsect_.Enter();
    frame_to_render = render_buffers_.FrameToRender();
    uint32_t wait_time = render_buffers_.TimeToNextFrameRelease();
    buffer_critsect_.Leave();

    // Set timer for next frame to render. Cap the wait so the termination
    // flag above is checked at least every KEventMaxWaitTimeMs.
    if (wait_time > KEventMaxWaitTimeMs) {
      wait_time = KEventMaxWaitTimeMs;
    }
    deliver_buffer_event_.StartTimer(false, wait_time);

    if (!frame_to_render) {
      // No decoded frame is due; possibly show the start or timeout image.
      if (render_callback_) {
        if (last_rendered_frame_.render_time_ms() == 0 &&
            !start_image_.IsZeroSize()) {
          // We have not rendered anything and have a start image.
          temp_frame_.CopyFrame(start_image_);
          render_callback_->RenderFrame(stream_id_, temp_frame_);
        } else if (!timeout_image_.IsZeroSize() &&
                   last_rendered_frame_.render_time_ms() + timeout_time_ <
                       TickTime::MillisecondTimestamp()) {
          // Render a timeout image.
          temp_frame_.CopyFrame(timeout_image_);
          render_callback_->RenderFrame(stream_id_, temp_frame_);
        }
      }

      // No frame.
      thread_critsect_.Leave();
      return true;
    }

    // Send frame for rendering.
    if (external_callback_) {
      WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
                   "%s: executing external renderer callback to deliver frame",
                   __FUNCTION__, frame_to_render->render_time_ms());
      external_callback_->RenderFrame(stream_id_, *frame_to_render);
    } else {
      if (render_callback_) {
        WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
                     "%s: Render frame, time: ", __FUNCTION__,
                     frame_to_render->render_time_ms());
        render_callback_->RenderFrame(stream_id_, *frame_to_render);
      }
    }

    // Release critsect before calling the module user.
    thread_critsect_.Leave();

    // We're done with this frame, delete it. The rendered frame is kept in
    // last_rendered_frame_ (via swap) and the buffer slot is recycled.
    if (frame_to_render) {
      CriticalSectionScoped cs(&buffer_critsect_);
      last_rendered_frame_.SwapFrame(frame_to_render);
      render_buffers_.ReturnFrame(frame_to_render);
    }
  }
  return true;
}
| 364 | |
pbos@webrtc.org | ddf94e7 | 2013-04-10 08:09:04 +0000 | [diff] [blame] | 365 | int32_t IncomingVideoStream::GetLastRenderedFrame( |
mikhal@webrtc.org | 9fedff7 | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 366 | I420VideoFrame& video_frame) const { |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 367 | CriticalSectionScoped cs(&buffer_critsect_); |
| 368 | return video_frame.CopyFrame(last_rendered_frame_); |
| 369 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 370 | |
mflodman@webrtc.org | 327ada1 | 2012-05-30 10:45:18 +0000 | [diff] [blame] | 371 | } // namespace webrtc |