/*
 *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <math.h>
#include <string.h>

#include "api/video/i010_buffer.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "rtc_base/bind.h"
#include "rtc_base/time_utils.h"
#include "test/fake_texture_frame.h"
#include "test/frame_utils.h"
#include "test/gtest.h"

namespace webrtc {

namespace {

// Helper class to delegate calls to the appropriate container.
class PlanarYuvBufferFactory {
 public:
  static rtc::scoped_refptr<PlanarYuvBuffer> Create(VideoFrameBuffer::Type type,
                                                    int width,
                                                    int height) {
    switch (type) {
      case VideoFrameBuffer::Type::kI420:
        return I420Buffer::Create(width, height);
      case VideoFrameBuffer::Type::kI010:
        return I010Buffer::Create(width, height);
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }

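  // Returns a deep copy of |src| in the same pixel format.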
  static rtc::scoped_refptr<PlanarYuvBuffer> Copy(const VideoFrameBuffer& src) {
    switch (src.type()) {
      case VideoFrameBuffer::Type::kI420:
        return I420Buffer::Copy(src);
      case VideoFrameBuffer::Type::kI010:
        return I010Buffer::Copy(*src.GetI010());
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }

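  // Returns a copy of |src| rotated by |rotation|, in the same pixel format.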
  static rtc::scoped_refptr<PlanarYuvBuffer> Rotate(const VideoFrameBuffer& src,
                                                    VideoRotation rotation) {
    switch (src.type()) {
      case VideoFrameBuffer::Type::kI420:
        return I420Buffer::Rotate(src, rotation);
      case VideoFrameBuffer::Type::kI010:
        return I010Buffer::Rotate(*src.GetI010(), rotation);
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }

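  // Extracts the |crop_width| x |crop_height| region of |src| starting at
  // (|offset_x|, |offset_y|). The output buffer has the same size as the
  // crop, so no scaling takes place.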
  static rtc::scoped_refptr<PlanarYuvBuffer> CropAndScaleFrom(
      const VideoFrameBuffer& src,
      int offset_x,
      int offset_y,
      int crop_width,
      int crop_height) {
    switch (src.type()) {
      case VideoFrameBuffer::Type::kI420: {
        rtc::scoped_refptr<I420Buffer> buffer =
            I420Buffer::Create(crop_width, crop_height);
        buffer->CropAndScaleFrom(*src.GetI420(), offset_x, offset_y, crop_width,
                                 crop_height);
        return buffer;
      }
      case VideoFrameBuffer::Type::kI010: {
        rtc::scoped_refptr<I010Buffer> buffer =
            I010Buffer::Create(crop_width, crop_height);
        buffer->CropAndScaleFrom(*src.GetI010(), offset_x, offset_y, crop_width,
                                 crop_height);
        return buffer;
      }
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }

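  // Center-crops |src| to (approximately) the |crop_width|:|crop_height|
  // aspect ratio; no scaling is performed.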
  static rtc::scoped_refptr<PlanarYuvBuffer> CropAndScaleFrom(
      const VideoFrameBuffer& src,
      int crop_width,
      int crop_height) {
    const int out_width =
        std::min(src.width(), crop_width * src.height() / crop_height);
    const int out_height =
        std::min(src.height(), crop_height * src.width() / crop_width);
    return CropAndScaleFrom(src, (src.width() - out_width) / 2,
                            (src.height() - out_height) / 2, out_width,
                            out_height);
  }

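  // Scales the full |src| frame to a |crop_width| x |crop_height| buffer of
  // the same pixel format.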
  static rtc::scoped_refptr<PlanarYuvBuffer>
  ScaleFrom(const VideoFrameBuffer& src, int crop_width, int crop_height) {
    switch (src.type()) {
      case VideoFrameBuffer::Type::kI420: {
        rtc::scoped_refptr<I420Buffer> buffer =
            I420Buffer::Create(crop_width, crop_height);
        buffer->ScaleFrom(*src.GetI420());
        return buffer;
      }
      case VideoFrameBuffer::Type::kI010: {
        rtc::scoped_refptr<I010Buffer> buffer =
            I010Buffer::Create(crop_width, crop_height);
        buffer->ScaleFrom(*src.GetI010());
        return buffer;
      }
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }
};

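// Creates a |width| x |height| gradient frame of the requested type. The I010
// variant is produced by converting an I420 gradient.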
rtc::scoped_refptr<PlanarYuvBuffer> CreateGradient(VideoFrameBuffer::Type type,
                                                   int width,
                                                   int height) {
  rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width, height));
  // Initialize with a gradient: Y ~ 128 * (x/w + y/h), U ~ 256 * x/w,
  // V ~ 256 * y/h.
  for (int x = 0; x < width; x++) {
    for (int y = 0; y < height; y++) {
      buffer->MutableDataY()[x + y * width] =
          128 * (x * height + y * width) / (width * height);
    }
  }
  int chroma_width = buffer->ChromaWidth();
  int chroma_height = buffer->ChromaHeight();
  for (int x = 0; x < chroma_width; x++) {
    for (int y = 0; y < chroma_height; y++) {
      buffer->MutableDataU()[x + y * chroma_width] =
          255 * x / (chroma_width - 1);
      buffer->MutableDataV()[x + y * chroma_width] =
          255 * y / (chroma_height - 1);
    }
  }
  if (type == VideoFrameBuffer::Type::kI420)
    return buffer;

  RTC_DCHECK(type == VideoFrameBuffer::Type::kI010);
  return I010Buffer::Copy(*buffer);
}

// The offsets and sizes describe the rectangle extracted from the
// original (gradient) frame, in relative coordinates where the
// original frame corresponds to the unit square, 0.0 <= x, y < 1.0.
void CheckCrop(const webrtc::I420BufferInterface& frame,
               double offset_x,
               double offset_y,
               double rel_width,
               double rel_height) {
  int width = frame.width();
  int height = frame.height();
  // Check that pixel values in the corners match the gradient used
  // for initialization.
  for (int i = 0; i < 2; i++) {
    for (int j = 0; j < 2; j++) {
      // Pixel coordinates of the corner.
      int x = i * (width - 1);
      int y = j * (height - 1);
      // Relative coordinates, where the range 0.0 - 1.0 corresponds to the
      // size of the uncropped input frame.
      double orig_x = offset_x + i * rel_width;
      double orig_y = offset_y + j * rel_height;

      EXPECT_NEAR(frame.DataY()[x + y * frame.StrideY()] / 256.0,
                  (orig_x + orig_y) / 2, 0.02);
      EXPECT_NEAR(frame.DataU()[x / 2 + (y / 2) * frame.StrideU()] / 256.0,
                  orig_x, 0.02);
      EXPECT_NEAR(frame.DataV()[x / 2 + (y / 2) * frame.StrideV()] / 256.0,
                  orig_y, 0.02);
    }
  }
}

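// Verifies that |rotated| has the dimensions and corner colors expected when
// a |width| x |height| CreateGradient frame is rotated by |rotation|.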
void CheckRotate(int width,
                 int height,
                 webrtc::VideoRotation rotation,
                 const webrtc::I420BufferInterface& rotated) {
  int rotated_width = width;
  int rotated_height = height;

  if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }
  EXPECT_EQ(rotated_width, rotated.width());
  EXPECT_EQ(rotated_height, rotated.height());

  // Clockwise order (with 0,0 at the top-left).
  const struct {
    int x;
    int y;
  } corners[] = {{0, 0}, {1, 0}, {1, 1}, {0, 1}};
  // Corresponding corner colors of the frame produced by CreateGradient.
  const struct {
    int y;
    int u;
    int v;
  } colors[] = {{0, 0, 0}, {127, 255, 0}, {255, 255, 255}, {127, 0, 255}};
  int corner_offset = static_cast<int>(rotation) / 90;

  for (int i = 0; i < 4; i++) {
    int j = (i + corner_offset) % 4;
    int x = corners[j].x * (rotated_width - 1);
    int y = corners[j].y * (rotated_height - 1);
    EXPECT_EQ(colors[i].y, rotated.DataY()[x + y * rotated.StrideY()]);
    EXPECT_EQ(colors[i].u,
              rotated.DataU()[(x / 2) + (y / 2) * rotated.StrideU()]);
    EXPECT_EQ(colors[i].v,
              rotated.DataV()[(x / 2) + (y / 2) * rotated.StrideV()]);
  }
}

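// Helpers that read back a single U, V, or Y sample at pixel (|col|, |row|)
// from either an I420 or an I010 buffer (chroma planes are 4:2:0 subsampled).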
int GetU(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
  if (buf->type() == VideoFrameBuffer::Type::kI420) {
    return buf->GetI420()
        ->DataU()[row / 2 * buf->GetI420()->StrideU() + col / 2];
  } else {
    return buf->GetI010()
        ->DataU()[row / 2 * buf->GetI010()->StrideU() + col / 2];
  }
}

int GetV(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
  if (buf->type() == VideoFrameBuffer::Type::kI420) {
    return buf->GetI420()
        ->DataV()[row / 2 * buf->GetI420()->StrideV() + col / 2];
  } else {
    return buf->GetI010()
        ->DataV()[row / 2 * buf->GetI010()->StrideV() + col / 2];
  }
}

int GetY(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
  if (buf->type() == VideoFrameBuffer::Type::kI420) {
    return buf->GetI420()->DataY()[row * buf->GetI420()->StrideY() + col];
  } else {
    return buf->GetI010()->DataY()[row * buf->GetI010()->StrideY() + col];
  }
}

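// Pastes |picture| into |canvas| at (|offset_col|, |offset_row|), dispatching
// on the buffer type.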
void PasteFromBuffer(PlanarYuvBuffer* canvas,
                     const PlanarYuvBuffer& picture,
                     int offset_col,
                     int offset_row) {
  if (canvas->type() == VideoFrameBuffer::Type::kI420) {
    I420Buffer* buf = static_cast<I420Buffer*>(canvas);
    buf->PasteFrom(*picture.GetI420(), offset_col, offset_row);
  } else {
    I010Buffer* buf = static_cast<I010Buffer*>(canvas);
    buf->PasteFrom(*picture.GetI010(), offset_col, offset_row);
  }
}

}  // namespace

TEST(TestVideoFrame, WidthHeightValues) {
  VideoFrame frame =
      VideoFrame::Builder()
          .set_video_frame_buffer(I420Buffer::Create(10, 10, 10, 14, 90))
          .set_rotation(webrtc::kVideoRotation_0)
          .set_timestamp_ms(789)
          .build();
  const int valid_value = 10;
  EXPECT_EQ(valid_value, frame.width());
  EXPECT_EQ(valid_value, frame.height());
  frame.set_timestamp(123u);
  EXPECT_EQ(123u, frame.timestamp());
  frame.set_ntp_time_ms(456);
  EXPECT_EQ(456, frame.ntp_time_ms());
  EXPECT_EQ(789, frame.render_time_ms());
}

TEST(TestVideoFrame, ShallowCopy) {
  uint32_t timestamp = 1;
  int64_t ntp_time_ms = 2;
  int64_t timestamp_us = 3;
  int stride_y = 15;
  int stride_u = 10;
  int stride_v = 10;
  int width = 15;
  int height = 15;

  const int kSizeY = 400;
  const int kSizeU = 100;
  const int kSizeV = 100;
  const VideoRotation kRotation = kVideoRotation_270;
  uint8_t buffer_y[kSizeY];
  uint8_t buffer_u[kSizeU];
  uint8_t buffer_v[kSizeV];
  memset(buffer_y, 16, kSizeY);
  memset(buffer_u, 8, kSizeU);
  memset(buffer_v, 4, kSizeV);

  VideoFrame frame1 = VideoFrame::Builder()
                          .set_video_frame_buffer(I420Buffer::Copy(
                              width, height, buffer_y, stride_y, buffer_u,
                              stride_u, buffer_v, stride_v))
                          .set_rotation(kRotation)
                          .set_timestamp_us(0)
                          .build();
  frame1.set_timestamp(timestamp);
  frame1.set_ntp_time_ms(ntp_time_ms);
  frame1.set_timestamp_us(timestamp_us);
  VideoFrame frame2(frame1);

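  // The copy shares the same underlying pixel buffer; the metadata checked
  // below is copied by value and can diverge afterwards.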
322 EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());
Ilya Nikolaevskiya8507e32019-05-03 11:39:26 +0200323 const webrtc::I420BufferInterface* yuv1 =
Emircan Uysaler901e0ff2018-06-26 12:22:38 -0700324 frame1.video_frame_buffer()->GetI420();
Ilya Nikolaevskiya8507e32019-05-03 11:39:26 +0200325 const webrtc::I420BufferInterface* yuv2 =
Emircan Uysaler901e0ff2018-06-26 12:22:38 -0700326 frame2.video_frame_buffer()->GetI420();
327 EXPECT_EQ(yuv1->DataY(), yuv2->DataY());
328 EXPECT_EQ(yuv1->DataU(), yuv2->DataU());
329 EXPECT_EQ(yuv1->DataV(), yuv2->DataV());
330
331 EXPECT_EQ(frame2.timestamp(), frame1.timestamp());
332 EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms());
333 EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us());
334 EXPECT_EQ(frame2.rotation(), frame1.rotation());
335
336 frame2.set_timestamp(timestamp + 1);
337 frame2.set_ntp_time_ms(ntp_time_ms + 1);
338 frame2.set_timestamp_us(timestamp_us + 1);
339 frame2.set_rotation(kVideoRotation_90);
340
341 EXPECT_NE(frame2.timestamp(), frame1.timestamp());
342 EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms());
343 EXPECT_NE(frame2.timestamp_us(), frame1.timestamp_us());
344 EXPECT_NE(frame2.rotation(), frame1.rotation());
345}
346
TEST(TestVideoFrame, TextureInitialValues) {
  VideoFrame frame = test::FakeNativeBuffer::CreateFrame(
      640, 480, 100, 10, webrtc::kVideoRotation_0);
  EXPECT_EQ(640, frame.width());
  EXPECT_EQ(480, frame.height());
  EXPECT_EQ(100u, frame.timestamp());
  EXPECT_EQ(10, frame.render_time_ms());
  ASSERT_TRUE(frame.video_frame_buffer() != nullptr);
  EXPECT_TRUE(frame.video_frame_buffer()->type() ==
              VideoFrameBuffer::Type::kNative);

  frame.set_timestamp(200);
  EXPECT_EQ(200u, frame.timestamp());
  frame.set_timestamp_us(20);
  EXPECT_EQ(20, frame.timestamp_us());
}

class TestPlanarYuvBuffer
    : public ::testing::TestWithParam<VideoFrameBuffer::Type> {};

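// Creates a 20x10 I420 buffer with constant planes (Y = 1, U = 2, V = 3).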
rtc::scoped_refptr<I420Buffer> CreateAndFillBuffer() {
  auto buf = I420Buffer::Create(20, 10);
  memset(buf->MutableDataY(), 1, 200);
  memset(buf->MutableDataU(), 2, 50);
  memset(buf->MutableDataV(), 3, 50);
  return buf;
}

TEST_P(TestPlanarYuvBuffer, Copy) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf1;
  switch (GetParam()) {
    case VideoFrameBuffer::Type::kI420: {
      buf1 = CreateAndFillBuffer();
      break;
    }
    case VideoFrameBuffer::Type::kI010: {
      buf1 = I010Buffer::Copy(*CreateAndFillBuffer());
      break;
    }
    default:
      RTC_NOTREACHED();
  }

  rtc::scoped_refptr<PlanarYuvBuffer> buf2 =
      PlanarYuvBufferFactory::Copy(*buf1);
  EXPECT_TRUE(test::FrameBufsEqual(buf1->ToI420(), buf2->ToI420()));
}

TEST_P(TestPlanarYuvBuffer, Scale) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Pure scaling, no cropping.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::ScaleFrom(*buf, 150, 75);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.0, 1.0, 1.0);
}

TEST_P(TestPlanarYuvBuffer, CropXCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Pure center cropping, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 50, 0, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.25, 0.0, 0.5, 1.0);
}

TEST_P(TestPlanarYuvBuffer, CropXNotCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Non-center cropping, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 25, 0, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.125, 0.0, 0.5, 1.0);
}

TEST_P(TestPlanarYuvBuffer, CropYCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 100, 200);

  // Pure center cropping, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 50, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.25, 1.0, 0.5);
}

TEST_P(TestPlanarYuvBuffer, CropYNotCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 100, 200);

  // Non-center cropping, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 25, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.5);
}

TEST_P(TestPlanarYuvBuffer, CropAndScale16x9) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 640, 480);

  // Center cropping to a 16:9 aspect ratio, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 320, 180);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.75);
}

TEST_P(TestPlanarYuvBuffer, PastesIntoBuffer) {
  const int kOffsetx = 20;
  const int kOffsety = 30;
  const int kPicSize = 20;
  const int kWidth = 160;
  const int kHeight = 80;
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), kWidth, kHeight);

  rtc::scoped_refptr<PlanarYuvBuffer> original =
      CreateGradient(GetParam(), kWidth, kHeight);

  rtc::scoped_refptr<PlanarYuvBuffer> picture =
      CreateGradient(GetParam(), kPicSize, kPicSize);

  rtc::scoped_refptr<PlanarYuvBuffer> odd_picture =
      CreateGradient(GetParam(), kPicSize + 1, kPicSize - 1);

  PasteFromBuffer(buf.get(), *picture, kOffsetx, kOffsety);

  for (int i = 0; i < kWidth; ++i) {
    for (int j = 0; j < kHeight; ++j) {
      bool is_inside = i >= kOffsetx && i < kOffsetx + kPicSize &&
                       j >= kOffsety && j < kOffsety + kPicSize;
      if (!is_inside) {
        EXPECT_EQ(GetU(original, i, j), GetU(buf, i, j));
        EXPECT_EQ(GetV(original, i, j), GetV(buf, i, j));
        EXPECT_EQ(GetY(original, i, j), GetY(buf, i, j));
      } else {
        EXPECT_EQ(GetU(picture, i - kOffsetx, j - kOffsety), GetU(buf, i, j));
        EXPECT_EQ(GetV(picture, i - kOffsetx, j - kOffsety), GetV(buf, i, j));
        EXPECT_EQ(GetY(picture, i - kOffsetx, j - kOffsety), GetY(buf, i, j));
      }
    }
  }
}

INSTANTIATE_TEST_SUITE_P(,
                         TestPlanarYuvBuffer,
                         ::testing::Values(VideoFrameBuffer::Type::kI420,
                                           VideoFrameBuffer::Type::kI010));

class TestPlanarYuvBufferRotate
    : public ::testing::TestWithParam<
          std::tuple<webrtc::VideoRotation, VideoFrameBuffer::Type>> {};

TEST_P(TestPlanarYuvBufferRotate, Rotates) {
  const webrtc::VideoRotation rotation = std::get<0>(GetParam());
  const VideoFrameBuffer::Type type = std::get<1>(GetParam());
  rtc::scoped_refptr<PlanarYuvBuffer> buffer = CreateGradient(type, 640, 480);
  rtc::scoped_refptr<PlanarYuvBuffer> rotated_buffer =
      PlanarYuvBufferFactory::Rotate(*buffer, rotation);
  CheckRotate(640, 480, rotation, *rotated_buffer->ToI420());
}

INSTANTIATE_TEST_SUITE_P(
    Rotate,
    TestPlanarYuvBufferRotate,
    ::testing::Combine(::testing::Values(kVideoRotation_0,
                                         kVideoRotation_90,
                                         kVideoRotation_180,
                                         kVideoRotation_270),
                       ::testing::Values(VideoFrameBuffer::Type::kI420,
                                         VideoFrameBuffer::Type::kI010)));

}  // namespace webrtc