/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include <stdio.h>

#include <deque>
#include <map>

#include "testing/gtest/include/gtest/gtest.h"
#include "gflags/gflags.h"

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/new_include/video_engine.h"
#include "webrtc/video_engine/test/common/direct_transport.h"
#include "webrtc/video_engine/test/common/file_capturer.h"
#include "webrtc/video_engine/test/common/frame_generator_capturer.h"
#include "webrtc/video_engine/test/common/generate_ssrcs.h"
#include "webrtc/video_engine/test/common/statistics.h"
#include "webrtc/video_engine/test/common/video_renderer.h"

DEFINE_int32(seconds, 10, "Seconds to run each clip.");

namespace webrtc {

struct FullStackTestParams {
  const char* test_label;
  struct {
    const char* name;
    size_t width, height, fps;
  } clip;
  size_t bitrate;
  double avg_psnr_threshold;
  double avg_ssim_threshold;
};

FullStackTestParams paris_qcif = {"net_delay_0_0_plr_0",
                                  {"paris_qcif", 176, 144, 30}, 300, 36.0,
                                  0.96};

// TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
FullStackTestParams foreman_cif = {"foreman_cif_net_delay_0_0_plr_0",
                                   {"foreman_cif", 352, 288, 30}, 700, 0.0,
                                   0.0};

class FullStackTest : public ::testing::TestWithParam<FullStackTestParams> {
 protected:
  std::map<uint32_t, bool> reserved_ssrcs_;
};

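// Sits on both ends of the loopback pipeline: forwards captured frames to the
// real send-stream input and outgoing packets to the real transport, while
// keeping a copy of every sent frame so that rendered frames can be matched
// by RTP timestamp and scored for PSNR/SSIM and delay.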
class VideoAnalyzer : public newapi::PacketReceiver,
                      public newapi::Transport,
                      public newapi::VideoRenderer,
                      public newapi::VideoSendStreamInput {
 public:
  VideoAnalyzer(newapi::VideoSendStreamInput* input,
                newapi::Transport* transport,
                newapi::VideoRenderer* loopback_video,
                const char* test_label,
                double avg_psnr_threshold,
                double avg_ssim_threshold,
                uint64_t duration_frames)
      : input_(input),
        transport_(transport),
        renderer_(loopback_video),
        receiver_(NULL),
        test_label_(test_label),
        rtp_timestamp_delta_(0),
        first_send_frame_(NULL),
        last_render_time_(0),
        avg_psnr_threshold_(avg_psnr_threshold),
        avg_ssim_threshold_(avg_ssim_threshold),
        frames_left_(duration_frames),
        crit_(CriticalSectionWrapper::CreateCriticalSection()),
        trigger_(EventWrapper::Create()) {}

  ~VideoAnalyzer() {
    while (!frames_.empty()) {
      delete frames_.back();
      frames_.pop_back();
    }
    while (!frame_pool_.empty()) {
      delete frame_pool_.back();
      frame_pool_.pop_back();
    }
  }

  virtual bool DeliverPacket(const void* packet, size_t length) OVERRIDE {
    scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
    RTPHeader header;
    parser->Parse(
        static_cast<const uint8_t*>(packet), static_cast<int>(length), &header);
    {
      CriticalSectionScoped cs(crit_.get());
      recv_times_[header.timestamp - rtp_timestamp_delta_] =
          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
    }

    return receiver_->DeliverPacket(packet, length);
  }

  virtual void PutFrame(const I420VideoFrame& video_frame,
                        uint32_t delta_capture_ms) OVERRIDE {
    I420VideoFrame* copy = NULL;
    {
      CriticalSectionScoped cs(crit_.get());
      if (frame_pool_.size() > 0) {
        copy = frame_pool_.front();
        frame_pool_.pop_front();
      }
    }
    if (copy == NULL)
      copy = new I420VideoFrame();

    copy->CopyFrame(video_frame);
    copy->set_timestamp(copy->render_time_ms() * 90);

    {
      CriticalSectionScoped cs(crit_.get());
      if (first_send_frame_ == NULL && rtp_timestamp_delta_ == 0)
        first_send_frame_ = copy;

      frames_.push_back(copy);
    }

    input_->PutFrame(video_frame, delta_capture_ms);
  }

  virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
    scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
    RTPHeader header;
    parser->Parse(packet, static_cast<int>(length), &header);

    {
      CriticalSectionScoped cs(crit_.get());
      if (rtp_timestamp_delta_ == 0) {
        rtp_timestamp_delta_ =
            header.timestamp - first_send_frame_->timestamp();
        first_send_frame_ = NULL;
        send_times_[header.timestamp - rtp_timestamp_delta_] =
            Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
      }
    }

    return transport_->SendRTP(packet, length);
  }

  virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE {
    return transport_->SendRTCP(packet, length);
  }

  virtual void RenderFrame(const I420VideoFrame& video_frame,
                           int time_to_render_ms) OVERRIDE {
    uint32_t send_timestamp = video_frame.timestamp() - rtp_timestamp_delta_;

    {
      CriticalSectionScoped cs(crit_.get());
      while (frames_.front()->timestamp() < send_timestamp) {
        AddFrameComparison(frames_.front(), &last_rendered_frame_, true);
        frame_pool_.push_back(frames_.front());
        frames_.pop_front();
      }

      I420VideoFrame* reference_frame = frames_.front();
      frames_.pop_front();
      assert(reference_frame != NULL);
      assert(reference_frame->timestamp() == send_timestamp);

      AddFrameComparison(reference_frame, &video_frame, false);
      frame_pool_.push_back(reference_frame);

      if (--frames_left_ == 0) {
        PrintResult("psnr", psnr_, " dB");
        PrintResult("ssim", ssim_, "");
        PrintResult("sender_time", sender_time_, " ms");
        PrintResult("receiver_time", receiver_time_, " ms");
        PrintResult("total_delay_incl_network", end_to_end_, " ms");
        PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
        EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
        EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
        trigger_->Set();
      }
    }

    renderer_->RenderFrame(video_frame, time_to_render_ms);
    last_rendered_frame_.CopyFrame(video_frame);
  }

  void Wait() { trigger_->Wait(WEBRTC_EVENT_INFINITE); }

  newapi::VideoSendStreamInput* input_;
  newapi::Transport* transport_;
  newapi::VideoRenderer* renderer_;
  newapi::PacketReceiver* receiver_;

 private:
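  // Adds PSNR/SSIM samples for |reference_frame| against |render| and, unless
  // the frame was dropped, records sender-, receiver- and end-to-end delay
  // samples derived from the capture, send and receive times.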
  void AddFrameComparison(const I420VideoFrame* reference_frame,
                          const I420VideoFrame* render,
                          bool dropped) {
    int64_t render_time = Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
    psnr_.AddSample(I420PSNR(reference_frame, render));
    ssim_.AddSample(I420SSIM(reference_frame, render));
    if (dropped)
      return;
    if (last_render_time_ != 0)
      rendered_delta_.AddSample(render_time - last_render_time_);
    last_render_time_ = render_time;

    int64_t input_time = reference_frame->render_time_ms();
    int64_t send_time = send_times_[reference_frame->timestamp()];
    send_times_.erase(reference_frame->timestamp());
    sender_time_.AddSample(send_time - input_time);
    int64_t recv_time = recv_times_[reference_frame->timestamp()];
    recv_times_.erase(reference_frame->timestamp());
    receiver_time_.AddSample(render_time - recv_time);
    end_to_end_.AddSample(render_time - input_time);
  }

  void PrintResult(const char* result_type,
                   test::Statistics stats,
                   const char* unit) {
    printf("RESULT %s: %s = {%f, %f}%s\n",
           result_type,
           test_label_,
           stats.Mean(),
           stats.StandardDeviation(),
           unit);
  }

  const char* test_label_;
  test::Statistics sender_time_;
  test::Statistics receiver_time_;
  test::Statistics psnr_;
  test::Statistics ssim_;
  test::Statistics end_to_end_;
  test::Statistics rendered_delta_;

  std::deque<I420VideoFrame*> frames_;
  std::deque<I420VideoFrame*> frame_pool_;
  I420VideoFrame last_rendered_frame_;
  std::map<uint32_t, int64_t> send_times_;
  std::map<uint32_t, int64_t> recv_times_;
  uint32_t rtp_timestamp_delta_;
  I420VideoFrame* first_send_frame_;
  int64_t last_render_time_;
  double avg_psnr_threshold_;
  double avg_ssim_threshold_;
  uint32_t frames_left_;
  scoped_ptr<CriticalSectionWrapper> crit_;
  scoped_ptr<EventWrapper> trigger_;
};

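// Sends a YUV test clip through the full send/receive stack over a loopback
// transport and verifies that the average PSNR and SSIM of the rendered
// frames stay above the per-clip thresholds.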
TEST_P(FullStackTest, NoPacketLoss) {
  FullStackTestParams params = GetParam();

  scoped_ptr<test::VideoRenderer> local_preview(test::VideoRenderer::Create(
      "Local Preview", params.clip.width, params.clip.height));
  scoped_ptr<test::VideoRenderer> loopback_video(test::VideoRenderer::Create(
      "Loopback Video", params.clip.width, params.clip.height));

  scoped_ptr<newapi::VideoEngine> video_engine(
      newapi::VideoEngine::Create(newapi::VideoEngineConfig()));

  test::DirectTransport transport(NULL);
  VideoAnalyzer analyzer(NULL,
                         &transport,
                         loopback_video.get(),
                         params.test_label,
                         params.avg_psnr_threshold,
                         params.avg_ssim_threshold,
                         FLAGS_seconds * params.clip.fps);

  scoped_ptr<newapi::VideoCall> call(video_engine->CreateCall(&analyzer));
  analyzer.receiver_ = call->Receiver();
  transport.SetReceiver(&analyzer);

  newapi::VideoSendStream::Config send_config = call->GetDefaultSendConfig();
  test::GenerateRandomSsrcs(&send_config, &reserved_ssrcs_);

  send_config.local_renderer = local_preview.get();

  // TODO(pbos): static_cast shouldn't be required after mflodman refactors the
  // VideoCodec struct.
  send_config.codec.width = static_cast<uint16_t>(params.clip.width);
  send_config.codec.height = static_cast<uint16_t>(params.clip.height);
  send_config.codec.minBitrate = params.bitrate;
  send_config.codec.startBitrate = params.bitrate;
  send_config.codec.maxBitrate = params.bitrate;

  newapi::VideoSendStream* send_stream = call->CreateSendStream(send_config);
  analyzer.input_ = send_stream->Input();

  Clock* test_clock = Clock::GetRealTimeClock();

  scoped_ptr<test::YuvFileFrameGenerator> file_frame_generator(
      test::YuvFileFrameGenerator::Create(
          test::ResourcePath(params.clip.name, "yuv").c_str(),
          params.clip.width,
          params.clip.height,
          test_clock));
  ASSERT_TRUE(file_frame_generator.get() != NULL);

  scoped_ptr<test::FrameGeneratorCapturer> file_capturer(
      test::FrameGeneratorCapturer::Create(
          &analyzer, file_frame_generator.get(), params.clip.fps));
  ASSERT_TRUE(file_capturer.get() != NULL);

  newapi::VideoReceiveStream::Config receive_config =
      call->GetDefaultReceiveConfig();
  receive_config.rtp.ssrc = send_config.rtp.ssrcs[0];
  receive_config.renderer = &analyzer;

  newapi::VideoReceiveStream* receive_stream =
      call->CreateReceiveStream(receive_config);

  receive_stream->StartReceive();
  send_stream->StartSend();

  file_capturer->Start();

  analyzer.Wait();

  file_capturer->Stop();
  send_stream->StopSend();
  receive_stream->StopReceive();

  call->DestroyReceiveStream(receive_stream);
  call->DestroySendStream(send_stream);
}

INSTANTIATE_TEST_CASE_P(FullStack,
                        FullStackTest,
                        ::testing::Values(paris_qcif, foreman_cif));

}  // namespace webrtc