blob: f7a27be7475d667fe3a61468567e9739ee3e0b01 [file] [log] [blame]
Emircan Uysaler901e0ff2018-06-26 12:22:38 -07001/*
2 * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
#include "api/video/video_frame.h"

#include <math.h>
#include <string.h>

#include <algorithm>

#include "api/video/i010_buffer.h"
#include "api/video/i420_buffer.h"
#include "rtc_base/bind.h"
#include "rtc_base/time_utils.h"
#include "test/fake_texture_frame.h"
#include "test/frame_utils.h"
#include "test/gtest.h"
23
24namespace webrtc {
25
26namespace {
27
28// Helper class to delegate calls to appropriate container.
29class PlanarYuvBufferFactory {
30 public:
31 static rtc::scoped_refptr<PlanarYuvBuffer> Create(VideoFrameBuffer::Type type,
32 int width,
33 int height) {
34 switch (type) {
35 case VideoFrameBuffer::Type::kI420:
36 return I420Buffer::Create(width, height);
37 case VideoFrameBuffer::Type::kI010:
38 return I010Buffer::Create(width, height);
39 default:
40 RTC_NOTREACHED();
41 }
42 return nullptr;
43 }
44
45 static rtc::scoped_refptr<PlanarYuvBuffer> Copy(const VideoFrameBuffer& src) {
46 switch (src.type()) {
47 case VideoFrameBuffer::Type::kI420:
48 return I420Buffer::Copy(src);
49 case VideoFrameBuffer::Type::kI010:
50 return I010Buffer::Copy(*src.GetI010());
51 default:
52 RTC_NOTREACHED();
53 }
54 return nullptr;
55 }
56
57 static rtc::scoped_refptr<PlanarYuvBuffer> Rotate(const VideoFrameBuffer& src,
58 VideoRotation rotation) {
59 switch (src.type()) {
60 case VideoFrameBuffer::Type::kI420:
61 return I420Buffer::Rotate(src, rotation);
62 case VideoFrameBuffer::Type::kI010:
63 return I010Buffer::Rotate(*src.GetI010(), rotation);
64 default:
65 RTC_NOTREACHED();
66 }
67 return nullptr;
68 }
69
70 static rtc::scoped_refptr<PlanarYuvBuffer> CropAndScaleFrom(
71 const VideoFrameBuffer& src,
72 int offset_x,
73 int offset_y,
74 int crop_width,
75 int crop_height) {
76 switch (src.type()) {
77 case VideoFrameBuffer::Type::kI420: {
78 rtc::scoped_refptr<I420Buffer> buffer =
79 I420Buffer::Create(crop_width, crop_height);
80 buffer->CropAndScaleFrom(*src.GetI420(), offset_x, offset_y, crop_width,
81 crop_height);
82 return buffer;
83 }
84 case VideoFrameBuffer::Type::kI010: {
85 rtc::scoped_refptr<I010Buffer> buffer =
86 I010Buffer::Create(crop_width, crop_height);
87 buffer->CropAndScaleFrom(*src.GetI010(), offset_x, offset_y, crop_width,
88 crop_height);
89 return buffer;
90 }
91 default:
92 RTC_NOTREACHED();
93 }
94 return nullptr;
95 }
96
97 static rtc::scoped_refptr<PlanarYuvBuffer> CropAndScaleFrom(
98 const VideoFrameBuffer& src,
99 int crop_width,
100 int crop_height) {
101 const int out_width =
102 std::min(src.width(), crop_width * src.height() / crop_height);
103 const int out_height =
104 std::min(src.height(), crop_height * src.width() / crop_width);
105 return CropAndScaleFrom(src, (src.width() - out_width) / 2,
106 (src.height() - out_height) / 2, out_width,
107 out_height);
108 }
109
110 static rtc::scoped_refptr<PlanarYuvBuffer>
111 ScaleFrom(const VideoFrameBuffer& src, int crop_width, int crop_height) {
112 switch (src.type()) {
113 case VideoFrameBuffer::Type::kI420: {
114 rtc::scoped_refptr<I420Buffer> buffer =
115 I420Buffer::Create(crop_width, crop_height);
116 buffer->ScaleFrom(*src.GetI420());
117 return buffer;
118 }
119 case VideoFrameBuffer::Type::kI010: {
120 rtc::scoped_refptr<I010Buffer> buffer =
121 I010Buffer::Create(crop_width, crop_height);
122 buffer->ScaleFrom(*src.GetI010());
123 return buffer;
124 }
125 default:
126 RTC_NOTREACHED();
127 }
128 return nullptr;
129 }
130};
131
132rtc::scoped_refptr<PlanarYuvBuffer> CreateGradient(VideoFrameBuffer::Type type,
133 int width,
134 int height) {
135 rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width, height));
136 // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h
137 for (int x = 0; x < width; x++) {
138 for (int y = 0; y < height; y++) {
139 buffer->MutableDataY()[x + y * width] =
140 128 * (x * height + y * width) / (width * height);
141 }
142 }
143 int chroma_width = buffer->ChromaWidth();
144 int chroma_height = buffer->ChromaHeight();
145 for (int x = 0; x < chroma_width; x++) {
146 for (int y = 0; y < chroma_height; y++) {
147 buffer->MutableDataU()[x + y * chroma_width] =
148 255 * x / (chroma_width - 1);
149 buffer->MutableDataV()[x + y * chroma_width] =
150 255 * y / (chroma_height - 1);
151 }
152 }
153 if (type == VideoFrameBuffer::Type::kI420)
154 return buffer;
155
156 RTC_DCHECK(type == VideoFrameBuffer::Type::kI010);
157 return I010Buffer::Copy(*buffer);
158}
159
160// The offsets and sizes describe the rectangle extracted from the
161// original (gradient) frame, in relative coordinates where the
162// original frame correspond to the unit square, 0.0 <= x, y < 1.0.
163void CheckCrop(const webrtc::I420BufferInterface& frame,
164 double offset_x,
165 double offset_y,
166 double rel_width,
167 double rel_height) {
168 int width = frame.width();
169 int height = frame.height();
170 // Check that pixel values in the corners match the gradient used
171 // for initialization.
172 for (int i = 0; i < 2; i++) {
173 for (int j = 0; j < 2; j++) {
174 // Pixel coordinates of the corner.
175 int x = i * (width - 1);
176 int y = j * (height - 1);
177 // Relative coordinates, range 0.0 - 1.0 correspond to the
178 // size of the uncropped input frame.
179 double orig_x = offset_x + i * rel_width;
180 double orig_y = offset_y + j * rel_height;
181
182 EXPECT_NEAR(frame.DataY()[x + y * frame.StrideY()] / 256.0,
183 (orig_x + orig_y) / 2, 0.02);
184 EXPECT_NEAR(frame.DataU()[x / 2 + (y / 2) * frame.StrideU()] / 256.0,
185 orig_x, 0.02);
186 EXPECT_NEAR(frame.DataV()[x / 2 + (y / 2) * frame.StrideV()] / 256.0,
187 orig_y, 0.02);
188 }
189 }
190}
191
// Verifies that |rotated| is |rotation| applied to a width x height gradient
// frame from CreateGradient, by checking dimensions and the four corner
// colors.
void CheckRotate(int width,
                 int height,
                 webrtc::VideoRotation rotation,
                 const webrtc::I420BufferInterface& rotated) {
  int rotated_width = width;
  int rotated_height = height;

  // 90/270 degree rotation swaps the output dimensions.
  if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }
  EXPECT_EQ(rotated_width, rotated.width());
  EXPECT_EQ(rotated_height, rotated.height());

  // Clock-wise order (with 0,0 at top-left)
  const struct {
    int x;
    int y;
  } corners[] = {{0, 0}, {1, 0}, {1, 1}, {0, 1}};
  // Corresponding corner colors of the frame produced by CreateGradient.
  const struct {
    int y;
    int u;
    int v;
  } colors[] = {{0, 0, 0}, {127, 255, 0}, {255, 255, 255}, {127, 0, 255}};
  // Each 90 degrees of rotation shifts every source corner one position
  // clockwise in the output frame.
  int corner_offset = static_cast<int>(rotation) / 90;

  for (int i = 0; i < 4; i++) {
    int j = (i + corner_offset) % 4;
    int x = corners[j].x * (rotated_width - 1);
    int y = corners[j].y * (rotated_height - 1);
    EXPECT_EQ(colors[i].y, rotated.DataY()[x + y * rotated.StrideY()]);
    EXPECT_EQ(colors[i].u,
              rotated.DataU()[(x / 2) + (y / 2) * rotated.StrideU()]);
    EXPECT_EQ(colors[i].v,
              rotated.DataV()[(x / 2) + (y / 2) * rotated.StrideV()]);
  }
}
229
Ilya Nikolaevskiya9216602018-12-21 14:21:08 +0100230int GetU(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
231 if (buf->type() == VideoFrameBuffer::Type::kI420) {
232 return buf->GetI420()
233 ->DataU()[row / 2 * buf->GetI420()->StrideU() + col / 2];
234 } else {
235 return buf->GetI010()
236 ->DataU()[row / 2 * buf->GetI010()->StrideU() + col / 2];
237 }
238}
239
240int GetV(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
241 if (buf->type() == VideoFrameBuffer::Type::kI420) {
242 return buf->GetI420()
243 ->DataV()[row / 2 * buf->GetI420()->StrideV() + col / 2];
244 } else {
245 return buf->GetI010()
246 ->DataV()[row / 2 * buf->GetI010()->StrideV() + col / 2];
247 }
248}
249
250int GetY(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
251 if (buf->type() == VideoFrameBuffer::Type::kI420) {
252 return buf->GetI420()->DataY()[row * buf->GetI420()->StrideY() + col];
253 } else {
254 return buf->GetI010()->DataY()[row * buf->GetI010()->StrideY() + col];
255 }
256}
257
258void PasteFromBuffer(PlanarYuvBuffer* canvas,
259 const PlanarYuvBuffer& picture,
260 int offset_col,
261 int offset_row) {
262 if (canvas->type() == VideoFrameBuffer::Type::kI420) {
263 I420Buffer* buf = static_cast<I420Buffer*>(canvas);
264 buf->PasteFrom(*picture.GetI420(), offset_col, offset_row);
265 } else {
266 I010Buffer* buf = static_cast<I010Buffer*>(canvas);
267 buf->PasteFrom(*picture.GetI010(), offset_col, offset_row);
268 }
269}
270
Emircan Uysaler901e0ff2018-06-26 12:22:38 -0700271} // namespace
272
// Basic accessor sanity: builder-provided dimensions and render time are
// reported back, and timestamp setters round-trip.
TEST(TestVideoFrame, WidthHeightValues) {
  VideoFrame frame =
      VideoFrame::Builder()
          .set_video_frame_buffer(I420Buffer::Create(10, 10, 10, 14, 90))
          .set_rotation(webrtc::kVideoRotation_0)
          .set_timestamp_ms(789)
          .build();
  const int valid_value = 10;
  EXPECT_EQ(valid_value, frame.width());
  EXPECT_EQ(valid_value, frame.height());
  frame.set_timestamp(123u);
  EXPECT_EQ(123u, frame.timestamp());
  frame.set_ntp_time_ms(456);
  EXPECT_EQ(456, frame.ntp_time_ms());
  // set_timestamp_ms(789) above should surface as the render time.
  EXPECT_EQ(789, frame.render_time_ms());
}
289
// Copying a VideoFrame shares the underlying pixel buffer (shallow copy of
// the payload) while the metadata (timestamps, rotation) is copied by value
// and can diverge afterwards.
TEST(TestVideoFrame, ShallowCopy) {
  uint32_t timestamp = 1;
  int64_t ntp_time_ms = 2;
  int64_t timestamp_us = 3;
  int stride_y = 15;
  int stride_u = 10;
  int stride_v = 10;
  int width = 15;
  int height = 15;

  const int kSizeY = 400;
  const int kSizeU = 100;
  const int kSizeV = 100;
  const VideoRotation kRotation = kVideoRotation_270;
  uint8_t buffer_y[kSizeY];
  uint8_t buffer_u[kSizeU];
  uint8_t buffer_v[kSizeV];
  memset(buffer_y, 16, kSizeY);
  memset(buffer_u, 8, kSizeU);
  memset(buffer_v, 4, kSizeV);

  VideoFrame frame1 = VideoFrame::Builder()
                          .set_video_frame_buffer(I420Buffer::Copy(
                              width, height, buffer_y, stride_y, buffer_u,
                              stride_u, buffer_v, stride_v))
                          .set_rotation(kRotation)
                          .set_timestamp_us(0)
                          .build();
  frame1.set_timestamp(timestamp);
  frame1.set_ntp_time_ms(ntp_time_ms);
  frame1.set_timestamp_us(timestamp_us);
  VideoFrame frame2(frame1);

  // Both frames must point at the very same buffer object and plane data.
  EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());
  const webrtc::I420BufferInterface* yuv1 =
      frame1.video_frame_buffer()->GetI420();
  const webrtc::I420BufferInterface* yuv2 =
      frame2.video_frame_buffer()->GetI420();
  EXPECT_EQ(yuv1->DataY(), yuv2->DataY());
  EXPECT_EQ(yuv1->DataU(), yuv2->DataU());
  EXPECT_EQ(yuv1->DataV(), yuv2->DataV());

  // Metadata is initially equal...
  EXPECT_EQ(frame2.timestamp(), frame1.timestamp());
  EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms());
  EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us());
  EXPECT_EQ(frame2.rotation(), frame1.rotation());

  // ...but mutating the copy must not affect the original.
  frame2.set_timestamp(timestamp + 1);
  frame2.set_ntp_time_ms(ntp_time_ms + 1);
  frame2.set_timestamp_us(timestamp_us + 1);
  frame2.set_rotation(kVideoRotation_90);

  EXPECT_NE(frame2.timestamp(), frame1.timestamp());
  EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms());
  EXPECT_NE(frame2.timestamp_us(), frame1.timestamp_us());
  EXPECT_NE(frame2.rotation(), frame1.rotation());
}
347
// A frame backed by a fake native (texture) buffer reports its construction
// parameters, identifies its buffer as kNative, and accepts timestamp
// updates.
TEST(TestVideoFrame, TextureInitialValues) {
  VideoFrame frame = test::FakeNativeBuffer::CreateFrame(
      640, 480, 100, 10, webrtc::kVideoRotation_0);
  EXPECT_EQ(640, frame.width());
  EXPECT_EQ(480, frame.height());
  EXPECT_EQ(100u, frame.timestamp());
  EXPECT_EQ(10, frame.render_time_ms());
  ASSERT_TRUE(frame.video_frame_buffer() != nullptr);
  EXPECT_TRUE(frame.video_frame_buffer()->type() ==
              VideoFrameBuffer::Type::kNative);

  frame.set_timestamp(200);
  EXPECT_EQ(200u, frame.timestamp());
  frame.set_timestamp_us(20);
  EXPECT_EQ(20, frame.timestamp_us());
}
364
// Minimal EncodedVideoFrameBuffer stub: empty payload, no color space,
// generic codec, never a key frame.
class TestEncodedFrame : public VideoFrame::EncodedVideoFrameBuffer {
 public:
  rtc::ArrayView<const uint8_t> data() const override {
    return rtc::ArrayView<const uint8_t>();
  }
  webrtc::ColorSpace* color_space() const override { return nullptr; }
  VideoCodecType codec() const override { return kVideoCodecGeneric; }
  // NOTE(review): unlike the members above, this one is not marked
  // 'override' -- confirm whether the base class declares is_key_frame()
  // virtual; if so, add 'override' here.
  bool is_key_frame() const { return false; }
};
374
// An encoded-frame buffer can be attached after construction; the getter
// returns exactly the pointer that was set (nullptr before).
TEST(TestVideoFrame, AcceptsEncodedFrameSource) {
  VideoFrame frame =
      VideoFrame::Builder()
          .set_video_frame_buffer(I420Buffer::Create(10, 10, 10, 14, 90))
          .build();
  EXPECT_EQ(frame.encoded_video_frame_buffer(), nullptr);
  auto encoded_frame = new rtc::RefCountedObject<TestEncodedFrame>();
  frame.set_encoded_video_frame_buffer(encoded_frame);
  EXPECT_EQ(frame.encoded_video_frame_buffer(), encoded_frame);
}
385
// Copying a VideoFrame shares (does not clone) the attached encoded-frame
// buffer, mirroring the shallow-copy semantics of the pixel buffer.
TEST(TestVideoFrame, CopiesWithSameEncodedFrameSource) {
  VideoFrame frame =
      VideoFrame::Builder()
          .set_video_frame_buffer(I420Buffer::Create(10, 10, 10, 14, 90))
          .set_encoded_video_frame_buffer(
              new rtc::RefCountedObject<TestEncodedFrame>())
          .build();
  VideoFrame frame2 = frame;
  EXPECT_EQ(frame.encoded_video_frame_buffer().get(),
            frame2.encoded_video_frame_buffer().get());
}
397
// Value-parameterized fixture running each test once per planar buffer type
// (kI420 and kI010); see the INSTANTIATE_TEST_SUITE_P at the bottom.
class TestPlanarYuvBuffer
    : public ::testing::TestWithParam<VideoFrameBuffer::Type> {};
400
401rtc::scoped_refptr<I420Buffer> CreateAndFillBuffer() {
402 auto buf = I420Buffer::Create(20, 10);
403 memset(buf->MutableDataY(), 1, 200);
404 memset(buf->MutableDataU(), 2, 50);
405 memset(buf->MutableDataV(), 3, 50);
406 return buf;
407}
408
// PlanarYuvBufferFactory::Copy must produce a pixel-identical buffer for
// both I420 and I010 sources.
TEST_P(TestPlanarYuvBuffer, Copy) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf1;
  switch (GetParam()) {
    case VideoFrameBuffer::Type::kI420: {
      buf1 = CreateAndFillBuffer();
      break;
    }
    case VideoFrameBuffer::Type::kI010: {
      // Build the I010 source from the same I420 fill pattern.
      buf1 = I010Buffer::Copy(*CreateAndFillBuffer());
      break;
    }
    default:
      RTC_NOTREACHED();
  }

  rtc::scoped_refptr<PlanarYuvBuffer> buf2 =
      PlanarYuvBufferFactory::Copy(*buf1);
  // Compare both in I420 space so the check is type-agnostic.
  EXPECT_TRUE(test::FrameBufsEqual(buf1->ToI420(), buf2->ToI420()));
}
428
TEST_P(TestPlanarYuvBuffer, Scale) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Pure scaling, no cropping: the whole unit square must survive.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::ScaleFrom(*buf, 150, 75);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.0, 1.0, 1.0);
}
438
TEST_P(TestPlanarYuvBuffer, CropXCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Pure center cropping, no scaling: keep the middle 100 of 200 columns.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 50, 0, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.25, 0.0, 0.5, 1.0);
}
448
TEST_P(TestPlanarYuvBuffer, CropXNotCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Non-center cropping, no scaling: crop starts at column 25 of 200.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 25, 0, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.125, 0.0, 0.5, 1.0);
}
458
TEST_P(TestPlanarYuvBuffer, CropYCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 100, 200);

  // Pure center cropping, no scaling: keep the middle 100 of 200 rows.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 50, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.25, 1.0, 0.5);
}
468
TEST_P(TestPlanarYuvBuffer, CropYNotCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 100, 200);

  // Non-center cropping, no scaling: crop starts at row 25 of 200.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 25, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.5);
}
478
TEST_P(TestPlanarYuvBuffer, CropAndScale16x9) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 640, 480);

  // Center-crop the 4:3 source to 16:9 and scale down to 320x180: the full
  // width is kept while the top and bottom eighths are cropped away.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 320, 180);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.75);
}
488
Ilya Nikolaevskiya9216602018-12-21 14:21:08 +0100489TEST_P(TestPlanarYuvBuffer, PastesIntoBuffer) {
490 const int kOffsetx = 20;
491 const int kOffsety = 30;
492 const int kPicSize = 20;
493 const int kWidth = 160;
494 const int kHeight = 80;
495 rtc::scoped_refptr<PlanarYuvBuffer> buf =
496 CreateGradient(GetParam(), kWidth, kHeight);
497
498 rtc::scoped_refptr<PlanarYuvBuffer> original =
499 CreateGradient(GetParam(), kWidth, kHeight);
500
501 rtc::scoped_refptr<PlanarYuvBuffer> picture =
502 CreateGradient(GetParam(), kPicSize, kPicSize);
503
504 rtc::scoped_refptr<PlanarYuvBuffer> odd_picture =
505 CreateGradient(GetParam(), kPicSize + 1, kPicSize - 1);
506
507 PasteFromBuffer(buf.get(), *picture, kOffsetx, kOffsety);
508
509 for (int i = 0; i < kWidth; ++i) {
510 for (int j = 0; j < kHeight; ++j) {
511 bool is_inside = i >= kOffsetx && i < kOffsetx + kPicSize &&
512 j >= kOffsety && j < kOffsety + kPicSize;
513 if (!is_inside) {
514 EXPECT_EQ(GetU(original, i, j), GetU(buf, i, j));
515 EXPECT_EQ(GetV(original, i, j), GetV(buf, i, j));
516 EXPECT_EQ(GetY(original, i, j), GetY(buf, i, j));
517 } else {
518 EXPECT_EQ(GetU(picture, i - kOffsetx, j - kOffsety), GetU(buf, i, j));
519 EXPECT_EQ(GetV(picture, i - kOffsetx, j - kOffsety), GetV(buf, i, j));
520 EXPECT_EQ(GetY(picture, i - kOffsetx, j - kOffsety), GetY(buf, i, j));
521 }
522 }
523 }
524}
525
// Run every TestPlanarYuvBuffer test for both supported planar buffer types.
INSTANTIATE_TEST_SUITE_P(All,
                         TestPlanarYuvBuffer,
                         ::testing::Values(VideoFrameBuffer::Type::kI420,
                                           VideoFrameBuffer::Type::kI010));
Emircan Uysaler901e0ff2018-06-26 12:22:38 -0700530
// Fixture parameterized over (rotation, buffer type) pairs; see the Rotate
// INSTANTIATE_TEST_SUITE_P below for the full cross product.
class TestPlanarYuvBufferRotate
    : public ::testing::TestWithParam<
          std::tuple<webrtc::VideoRotation, VideoFrameBuffer::Type>> {};
534
// Rotating a 640x480 gradient must produce correctly sized output with the
// corner colors shifted accordingly (verified by CheckRotate).
TEST_P(TestPlanarYuvBufferRotate, Rotates) {
  const webrtc::VideoRotation rotation = std::get<0>(GetParam());
  const VideoFrameBuffer::Type type = std::get<1>(GetParam());
  rtc::scoped_refptr<PlanarYuvBuffer> buffer = CreateGradient(type, 640, 480);
  rtc::scoped_refptr<PlanarYuvBuffer> rotated_buffer =
      PlanarYuvBufferFactory::Rotate(*buffer, rotation);
  CheckRotate(640, 480, rotation, *rotated_buffer->ToI420());
}
543
// Cross product: all four rotations, applied to both buffer types.
INSTANTIATE_TEST_SUITE_P(
    Rotate,
    TestPlanarYuvBufferRotate,
    ::testing::Combine(::testing::Values(kVideoRotation_0,
                                         kVideoRotation_90,
                                         kVideoRotation_180,
                                         kVideoRotation_270),
                       ::testing::Values(VideoFrameBuffer::Type::kI420,
                                         VideoFrameBuffer::Type::kI010)));
553
554} // namespace webrtc