// libjingle
// Copyright 2014 Google Inc.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
//    this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
//    this list of conditions and the following disclaimer in the documentation
//    and/or other materials provided with the distribution.
// 3. The name of the author may not be used to endorse or promote products
//    derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
// EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <string>

#include "libyuv/convert.h"
#include "libyuv/convert_from.h"
#include "libyuv/convert_from_argb.h"
#include "libyuv/format_conversion.h"
#include "libyuv/mjpeg_decoder.h"
#include "libyuv/planar_functions.h"
#include "talk/media/base/testutils.h"
#include "talk/media/base/videocommon.h"
#include "webrtc/base/flags.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/scoped_ptr.h"

// Undefine macros for the Windows build.
#undef max
#undef min

using cricket::DumpPlanarYuvTestImage;

DEFINE_bool(planarfunctions_dump, false,
    "whether to write out scaled images for inspection");
DEFINE_int(planarfunctions_repeat, 1,
    "how many times to perform each scaling operation (for perf testing)");

namespace cricket {

// Number of testing colors in each color channel.
static const int kTestingColorChannelResolution = 6;

// The total number of testing colors:
// kTestingColorNum = kTestingColorChannelResolution^3.
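// With a channel resolution of 6, this yields 6^3 = 216 distinct test colors.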
static const int kTestingColorNum = kTestingColorChannelResolution *
    kTestingColorChannelResolution * kTestingColorChannelResolution;

static const int kWidth = 1280;
static const int kHeight = 720;
static const int kAlignment = 16;
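// Test buffers are over-allocated by kAlignment bytes and then aligned with
// ALIGNP so that the aligned cases exercise libyuv's SIMD fast paths; the
// parameterized tests below additionally offset the output pointer by one
// byte to force the slower unaligned (C) code path.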

class PlanarFunctionsTest : public testing::TestWithParam<int> {
 protected:
  PlanarFunctionsTest() : dump_(false), repeat_(1) {
    InitializeColorBand();
  }

  virtual void SetUp() {
    dump_ = FLAG_planarfunctions_dump;
    repeat_ = FLAG_planarfunctions_repeat;
  }

  // Initialize the color band for testing.
  void InitializeColorBand() {
    testing_color_y_.reset(new uint8[kTestingColorNum]);
    testing_color_u_.reset(new uint8[kTestingColorNum]);
    testing_color_v_.reset(new uint8[kTestingColorNum]);
    testing_color_r_.reset(new uint8[kTestingColorNum]);
    testing_color_g_.reset(new uint8[kTestingColorNum]);
    testing_color_b_.reset(new uint8[kTestingColorNum]);
    int color_counter = 0;
    for (int i = 0; i < kTestingColorChannelResolution; ++i) {
      uint8 color_r = static_cast<uint8>(
          i * 255 / (kTestingColorChannelResolution - 1));
      for (int j = 0; j < kTestingColorChannelResolution; ++j) {
        uint8 color_g = static_cast<uint8>(
            j * 255 / (kTestingColorChannelResolution - 1));
        for (int k = 0; k < kTestingColorChannelResolution; ++k) {
          uint8 color_b = static_cast<uint8>(
              k * 255 / (kTestingColorChannelResolution - 1));
          testing_color_r_[color_counter] = color_r;
          testing_color_g_[color_counter] = color_g;
          testing_color_b_[color_counter] = color_b;
          // Converting the testing RGB colors to YUV colors.
          ConvertRgbPixel(color_r, color_g, color_b,
                          &(testing_color_y_[color_counter]),
                          &(testing_color_u_[color_counter]),
                          &(testing_color_v_[color_counter]));
          ++color_counter;
        }
      }
    }
  }

  // Simple and slow RGB->YUV conversion. From NTSC standard, c/o Wikipedia.
  // (from lmivideoframe_unittest.cc)
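  // For example, pure white (r = g = b = 255) maps to Y = 235 and
  // U = V = 128, while pure black maps to Y = 16 and U = V = 128.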
  void ConvertRgbPixel(uint8 r, uint8 g, uint8 b,
                       uint8* y, uint8* u, uint8* v) {
    *y = ClampUint8(.257 * r + .504 * g + .098 * b + 16);
    *u = ClampUint8(-.148 * r - .291 * g + .439 * b + 128);
    *v = ClampUint8(.439 * r - .368 * g - .071 * b + 128);
  }

  uint8 ClampUint8(double value) {
    value = std::max(0., std::min(255., value));
    uint8 uint8_value = static_cast<uint8>(value);
    return uint8_value;
  }

  // Generate a Red-Green-Blue interweaving chessboard-like
  // YUV testing image (I420/I422/I444).
  // The pattern looks like  c0 c1 c2 c3 ...
  //                         c1 c2 c3 c4 ...
  //                         c2 c3 c4 c5 ...
  //                         ...............
  // The size of each chroma block is (block_size) x (block_size).
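  // subsample_type selects 4:2:0, 4:2:2, or 4:4:4 chroma subsampling. The
  // returned pointer owns the allocation (the callers below wrap it in a
  // scoped_ptr); y_pointer, u_pointer, and v_pointer are set to the aligned
  // planes inside that allocation.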
  uint8* CreateFakeYuvTestingImage(int height, int width, int block_size,
                                   libyuv::JpegSubsamplingType subsample_type,
                                   uint8* &y_pointer,
                                   uint8* &u_pointer,
                                   uint8* &v_pointer) {
    if (height <= 0 || width <= 0 || block_size <= 0) { return NULL; }
    int y_size = height * width;
    int u_size, v_size;
    int vertical_sample_ratio = 1, horizontal_sample_ratio = 1;
    switch (subsample_type) {
      case libyuv::kJpegYuv420:
        u_size = ((height + 1) >> 1) * ((width + 1) >> 1);
        v_size = u_size;
        vertical_sample_ratio = 2, horizontal_sample_ratio = 2;
        break;
      case libyuv::kJpegYuv422:
        u_size = height * ((width + 1) >> 1);
        v_size = u_size;
        vertical_sample_ratio = 1, horizontal_sample_ratio = 2;
        break;
      case libyuv::kJpegYuv444:
        v_size = u_size = y_size;
        vertical_sample_ratio = 1, horizontal_sample_ratio = 1;
        break;
      case libyuv::kJpegUnknown:
      default:
        return NULL;
        break;
    }
    uint8* image_pointer = new uint8[y_size + u_size + v_size + kAlignment];
    y_pointer = ALIGNP(image_pointer, kAlignment);
    u_pointer = ALIGNP(&image_pointer[y_size], kAlignment);
    v_pointer = ALIGNP(&image_pointer[y_size + u_size], kAlignment);
    uint8* current_y_pointer = y_pointer;
    uint8* current_u_pointer = u_pointer;
    uint8* current_v_pointer = v_pointer;
    for (int j = 0; j < height; ++j) {
      for (int i = 0; i < width; ++i) {
        int color = ((i / block_size) + (j / block_size)) % kTestingColorNum;
        *(current_y_pointer++) = testing_color_y_[color];
        if (i % horizontal_sample_ratio == 0 &&
            j % vertical_sample_ratio == 0) {
          *(current_u_pointer++) = testing_color_u_[color];
          *(current_v_pointer++) = testing_color_v_[color];
        }
      }
    }
    return image_pointer;
  }

  // Generate a Red-Green-Blue interweaving chessboard-like
  // YUY2/UYVY testing image.
  // The pattern looks like  c0 c1 c2 c3 ...
  //                         c1 c2 c3 c4 ...
  //                         c2 c3 c4 c5 ...
  //                         ...............
  // The size of each chroma block is (block_size) x (block_size).
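  // Byte order in the packed buffer below: YUY2 stores each pixel pair as
  // Y0 U Y1 V, while UYVY stores it as U Y0 V Y1 (U and V are averaged over
  // the pair).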
  uint8* CreateFakeInterleaveYuvTestingImage(
      int height, int width, int block_size,
      uint8* &yuv_pointer, FourCC fourcc_type) {
    if (height <= 0 || width <= 0 || block_size <= 0) { return NULL; }
    if (fourcc_type != FOURCC_YUY2 && fourcc_type != FOURCC_UYVY) {
      LOG(LS_ERROR) << "Format " << static_cast<int>(fourcc_type)
                    << " is not supported.";
      return NULL;
    }
    // Regularize the width of the output to be even.
    int awidth = (width + 1) & ~1;

    uint8* image_pointer = new uint8[2 * height * awidth + kAlignment];
    yuv_pointer = ALIGNP(image_pointer, kAlignment);
    uint8* current_yuv_pointer = yuv_pointer;
    switch (fourcc_type) {
      case FOURCC_YUY2: {
        for (int j = 0; j < height; ++j) {
          for (int i = 0; i < awidth; i += 2, current_yuv_pointer += 4) {
            int color1 = ((i / block_size) + (j / block_size)) %
                kTestingColorNum;
            int color2 = (((i + 1) / block_size) + (j / block_size)) %
                kTestingColorNum;
            current_yuv_pointer[0] = testing_color_y_[color1];
            if (i < width) {
              current_yuv_pointer[1] = static_cast<uint8>(
                  (static_cast<uint32>(testing_color_u_[color1]) +
                   static_cast<uint32>(testing_color_u_[color2])) / 2);
              current_yuv_pointer[2] = testing_color_y_[color2];
              current_yuv_pointer[3] = static_cast<uint8>(
                  (static_cast<uint32>(testing_color_v_[color1]) +
                   static_cast<uint32>(testing_color_v_[color2])) / 2);
            } else {
              current_yuv_pointer[1] = testing_color_u_[color1];
              current_yuv_pointer[2] = 0;
              current_yuv_pointer[3] = testing_color_v_[color1];
            }
          }
        }
        break;
      }
      case FOURCC_UYVY: {
        for (int j = 0; j < height; ++j) {
          for (int i = 0; i < awidth; i += 2, current_yuv_pointer += 4) {
            int color1 = ((i / block_size) + (j / block_size)) %
                kTestingColorNum;
            int color2 = (((i + 1) / block_size) + (j / block_size)) %
                kTestingColorNum;
            if (i < width) {
              current_yuv_pointer[0] = static_cast<uint8>(
                  (static_cast<uint32>(testing_color_u_[color1]) +
                   static_cast<uint32>(testing_color_u_[color2])) / 2);
              current_yuv_pointer[1] = testing_color_y_[color1];
              current_yuv_pointer[2] = static_cast<uint8>(
                  (static_cast<uint32>(testing_color_v_[color1]) +
                   static_cast<uint32>(testing_color_v_[color2])) / 2);
              current_yuv_pointer[3] = testing_color_y_[color2];
            } else {
              current_yuv_pointer[0] = testing_color_u_[color1];
              current_yuv_pointer[1] = testing_color_y_[color1];
              current_yuv_pointer[2] = testing_color_v_[color1];
              current_yuv_pointer[3] = 0;
            }
          }
        }
        break;
      }
    }
    return image_pointer;
  }

  // Generate a Red-Green-Blue interweaving chessboard-like
  // Q420 testing image.
  // The pattern looks like  c0 c1 c2 c3 ...
  //                         c1 c2 c3 c4 ...
  //                         c2 c3 c4 c5 ...
  //                         ...............
  // The size of each chroma block is (block_size) x (block_size).
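  // The generated buffer holds the Y samples of the even rows as a packed
  // half-height plane, followed by the odd rows packed as YUY2; y_pointer and
  // yuy2_pointer are set to those two sections, matching the two source
  // pointers passed to libyuv::Q420ToI420 in the test below.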
  uint8* CreateFakeQ420TestingImage(int height, int width, int block_size,
                                    uint8* &y_pointer, uint8* &yuy2_pointer) {
    if (height <= 0 || width <= 0 || block_size <= 0) { return NULL; }
    // Regularize the width of the output to be even.
    int awidth = (width + 1) & ~1;

    uint8* image_pointer = new uint8[(height / 2) * awidth * 2 +
                                     ((height + 1) / 2) * width + kAlignment];
    y_pointer = ALIGNP(image_pointer, kAlignment);
    yuy2_pointer = y_pointer + ((height + 1) / 2) * width;
    uint8* current_yuy2_pointer = yuy2_pointer;
    uint8* current_y_pointer = y_pointer;
    for (int j = 0; j < height; ++j) {
      if (j % 2 == 0) {
        for (int i = 0; i < width; ++i) {
          int color = ((i / block_size) + (j / block_size)) %
              kTestingColorNum;
          *(current_y_pointer++) = testing_color_y_[color];
        }
      } else {
        for (int i = 0; i < awidth; i += 2, current_yuy2_pointer += 4) {
          int color1 = ((i / block_size) + (j / block_size)) %
              kTestingColorNum;
          int color2 = (((i + 1) / block_size) + (j / block_size)) %
              kTestingColorNum;
          current_yuy2_pointer[0] = testing_color_y_[color1];
          if (i < width) {
            current_yuy2_pointer[1] = static_cast<uint8>(
                (static_cast<uint32>(testing_color_u_[color1]) +
                 static_cast<uint32>(testing_color_u_[color2])) / 2);
            current_yuy2_pointer[2] = testing_color_y_[color2];
            current_yuy2_pointer[3] = static_cast<uint8>(
                (static_cast<uint32>(testing_color_v_[color1]) +
                 static_cast<uint32>(testing_color_v_[color2])) / 2);
          } else {
            current_yuy2_pointer[1] = testing_color_u_[color1];
            current_yuy2_pointer[2] = 0;
            current_yuy2_pointer[3] = testing_color_v_[color1];
          }
        }
      }
    }
    return image_pointer;
  }

  // Generate a Red-Green-Blue interweaving chessboard-like
  // NV12 testing image.
  // (Note: No interpolation is used.)
  // The pattern looks like  c0 c1 c2 c3 ...
  //                         c1 c2 c3 c4 ...
  //                         c2 c3 c4 c5 ...
  //                         ...............
  // The size of each chroma block is (block_size) x (block_size).
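  // NV12 layout: a full-resolution Y plane followed by a half-resolution
  // plane of interleaved U/V pairs (U first), which is what the loop below
  // writes into y_pointer and uv_pointer.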
  uint8* CreateFakeNV12TestingImage(int height, int width, int block_size,
                                    uint8* &y_pointer, uint8* &uv_pointer) {
    if (height <= 0 || width <= 0 || block_size <= 0) { return NULL; }

    uint8* image_pointer = new uint8[height * width +
        ((height + 1) / 2) * ((width + 1) / 2) * 2 + kAlignment];
    y_pointer = ALIGNP(image_pointer, kAlignment);
    uv_pointer = y_pointer + height * width;
    uint8* current_uv_pointer = uv_pointer;
    uint8* current_y_pointer = y_pointer;
    for (int j = 0; j < height; ++j) {
      for (int i = 0; i < width; ++i) {
        int color = ((i / block_size) + (j / block_size)) %
            kTestingColorNum;
        *(current_y_pointer++) = testing_color_y_[color];
      }
      if (j % 2 == 0) {
        for (int i = 0; i < width; i += 2, current_uv_pointer += 2) {
          int color = ((i / block_size) + (j / block_size)) %
              kTestingColorNum;
          current_uv_pointer[0] = testing_color_u_[color];
          current_uv_pointer[1] = testing_color_v_[color];
        }
      }
    }
    return image_pointer;
  }

  // Generate a Red-Green-Blue interweaving chessboard-like
  // M420 testing image.
  // (Note: No interpolation is used.)
  // The pattern looks like  c0 c1 c2 c3 ...
  //                         c1 c2 c3 c4 ...
  //                         c2 c3 c4 c5 ...
  //                         ...............
  // The size of each chroma block is (block_size) x (block_size).
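  // M420 layout: rows are written in groups of three, two full Y rows
  // followed by one row of interleaved U/V for that pair, which is exactly
  // the order the loop below emits into the single buffer.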
  uint8* CreateFakeM420TestingImage(
      int height, int width, int block_size, uint8* &m420_pointer) {
    if (height <= 0 || width <= 0 || block_size <= 0) { return NULL; }

    uint8* image_pointer = new uint8[height * width +
        ((height + 1) / 2) * ((width + 1) / 2) * 2 + kAlignment];
    m420_pointer = ALIGNP(image_pointer, kAlignment);
    uint8* current_m420_pointer = m420_pointer;
    for (int j = 0; j < height; ++j) {
      for (int i = 0; i < width; ++i) {
        int color = ((i / block_size) + (j / block_size)) %
            kTestingColorNum;
        *(current_m420_pointer++) = testing_color_y_[color];
      }
      if (j % 2 == 1) {
        for (int i = 0; i < width; i += 2, current_m420_pointer += 2) {
          int color = ((i / block_size) + ((j - 1) / block_size)) %
              kTestingColorNum;
          current_m420_pointer[0] = testing_color_u_[color];
          current_m420_pointer[1] = testing_color_v_[color];
        }
      }
    }
    return image_pointer;
  }

  // Generate a Red-Green-Blue interweaving chessboard-like
  // ARGB/ABGR/RAW/BG24 testing image.
  // The pattern looks like  c0 c1 c2 c3 ...
  //                         c1 c2 c3 c4 ...
  //                         c2 c3 c4 c5 ...
  //                         ...............
  // The size of each chroma block is (block_size) x (block_size).
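  // Per-pixel byte order written by the switch below:
  // ARGB = B,G,R,A; ABGR = R,G,B,A; BGRA = A,R,G,B; 24BG = B,G,R; RAW = R,G,B.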
  uint8* CreateFakeArgbTestingImage(int height, int width, int block_size,
                                    uint8* &argb_pointer, FourCC fourcc_type) {
    if (height <= 0 || width <= 0 || block_size <= 0) { return NULL; }
    uint8* image_pointer = NULL;
    if (fourcc_type == FOURCC_ABGR || fourcc_type == FOURCC_BGRA ||
        fourcc_type == FOURCC_ARGB) {
      image_pointer = new uint8[height * width * 4 + kAlignment];
    } else if (fourcc_type == FOURCC_RAW || fourcc_type == FOURCC_24BG) {
      image_pointer = new uint8[height * width * 3 + kAlignment];
    } else {
      LOG(LS_ERROR) << "Format " << static_cast<int>(fourcc_type)
                    << " is not supported.";
      return NULL;
    }
    argb_pointer = ALIGNP(image_pointer, kAlignment);
    uint8* current_pointer = argb_pointer;
    switch (fourcc_type) {
      case FOURCC_ARGB: {
        for (int j = 0; j < height; ++j) {
          for (int i = 0; i < width; ++i) {
            int color = ((i / block_size) + (j / block_size)) %
                kTestingColorNum;
            *(current_pointer++) = testing_color_b_[color];
            *(current_pointer++) = testing_color_g_[color];
            *(current_pointer++) = testing_color_r_[color];
            *(current_pointer++) = 255;
          }
        }
        break;
      }
      case FOURCC_ABGR: {
        for (int j = 0; j < height; ++j) {
          for (int i = 0; i < width; ++i) {
            int color = ((i / block_size) + (j / block_size)) %
                kTestingColorNum;
            *(current_pointer++) = testing_color_r_[color];
            *(current_pointer++) = testing_color_g_[color];
            *(current_pointer++) = testing_color_b_[color];
            *(current_pointer++) = 255;
          }
        }
        break;
      }
      case FOURCC_BGRA: {
        for (int j = 0; j < height; ++j) {
          for (int i = 0; i < width; ++i) {
            int color = ((i / block_size) + (j / block_size)) %
                kTestingColorNum;
            *(current_pointer++) = 255;
            *(current_pointer++) = testing_color_r_[color];
            *(current_pointer++) = testing_color_g_[color];
            *(current_pointer++) = testing_color_b_[color];
          }
        }
        break;
      }
      case FOURCC_24BG: {
        for (int j = 0; j < height; ++j) {
          for (int i = 0; i < width; ++i) {
            int color = ((i / block_size) + (j / block_size)) %
                kTestingColorNum;
            *(current_pointer++) = testing_color_b_[color];
            *(current_pointer++) = testing_color_g_[color];
            *(current_pointer++) = testing_color_r_[color];
          }
        }
        break;
      }
      case FOURCC_RAW: {
        for (int j = 0; j < height; ++j) {
          for (int i = 0; i < width; ++i) {
            int color = ((i / block_size) + (j / block_size)) %
                kTestingColorNum;
            *(current_pointer++) = testing_color_r_[color];
            *(current_pointer++) = testing_color_g_[color];
            *(current_pointer++) = testing_color_b_[color];
          }
        }
        break;
      }
      default: {
        LOG(LS_ERROR) << "Format " << static_cast<int>(fourcc_type)
                      << " is not supported.";
      }
    }
    return image_pointer;
  }

  // Check whether two memory chunks are equal, tolerating a mean squared
  // error below the given threshold.
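  // The threshold applies to MSE = SSE / osize. With the ~1.4 MB I420 frames
  // used below, a 1.e-6 bound therefore admits at most a single byte that is
  // off by one; the RGB conversion tests use looser bounds (2.0 to 64.0) to
  // absorb rounding differences in the color conversion.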
  static bool IsMemoryEqual(const uint8* ibuf, const uint8* obuf,
                            int osize, double average_error) {
    double sse = cricket::ComputeSumSquareError(ibuf, obuf, osize);
    double error = sse / osize;  // Mean Squared Error.
    double PSNR = cricket::ComputePSNR(sse, osize);
    LOG(LS_INFO) << "Image MSE: " << error << " Image PSNR: " << PSNR
                 << " First Diff Byte: " << FindDiff(ibuf, obuf, osize);
    return (error < average_error);
  }

  // Returns the index of the first differing byte. Easier to debug than
  // memcmp.
  static int FindDiff(const uint8* buf1, const uint8* buf2, int len) {
    int i = 0;
    while (i < len && buf1[i] == buf2[i]) {
      i++;
    }
    return (i < len) ? i : -1;
  }

  // Dump the result image (ARGB format).
  void DumpArgbImage(const uint8* obuf, int width, int height) {
    DumpPlanarArgbTestImage(GetTestName(), obuf, width, height);
  }

  // Dump the result image (YUV420 format).
  void DumpYuvImage(const uint8* obuf, int width, int height) {
    DumpPlanarYuvTestImage(GetTestName(), obuf, width, height);
  }

  std::string GetTestName() {
    const testing::TestInfo* const test_info =
        testing::UnitTest::GetInstance()->current_test_info();
    std::string test_name(test_info->name());
    return test_name;
  }

  bool dump_;
  int repeat_;

  // Y, U, V and R, G, B channels of testing colors.
  rtc::scoped_ptr<uint8[]> testing_color_y_;
  rtc::scoped_ptr<uint8[]> testing_color_u_;
  rtc::scoped_ptr<uint8[]> testing_color_v_;
  rtc::scoped_ptr<uint8[]> testing_color_r_;
  rtc::scoped_ptr<uint8[]> testing_color_g_;
  rtc::scoped_ptr<uint8[]> testing_color_b_;
};

TEST_F(PlanarFunctionsTest, I420Copy) {
  uint8 *y_pointer = NULL, *u_pointer = NULL, *v_pointer = NULL;
  int y_pitch = kWidth;
  int u_pitch = (kWidth + 1) >> 1;
  int v_pitch = (kWidth + 1) >> 1;
  int y_size = kHeight * kWidth;
  int uv_size = ((kHeight + 1) >> 1) * ((kWidth + 1) >> 1);
  int block_size = 3;
  // Generate a fake input image.
  rtc::scoped_ptr<uint8[]> yuv_input(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv420,
                                y_pointer, u_pointer, v_pointer));
  // Allocate space for the output image.
  rtc::scoped_ptr<uint8[]> yuv_output(
      new uint8[I420_SIZE(kHeight, kWidth) + kAlignment]);
  uint8 *y_output_pointer = ALIGNP(yuv_output.get(), kAlignment);
  uint8 *u_output_pointer = y_output_pointer + y_size;
  uint8 *v_output_pointer = u_output_pointer + uv_size;

  for (int i = 0; i < repeat_; ++i) {
    libyuv::I420Copy(y_pointer, y_pitch,
                     u_pointer, u_pitch,
                     v_pointer, v_pitch,
                     y_output_pointer, y_pitch,
                     u_output_pointer, u_pitch,
                     v_output_pointer, v_pitch,
                     kWidth, kHeight);
  }

  // Expect the copied frame to be exactly the same.
  EXPECT_TRUE(IsMemoryEqual(y_output_pointer, y_pointer,
                            I420_SIZE(kHeight, kWidth), 1.e-6));

  if (dump_) { DumpYuvImage(y_output_pointer, kWidth, kHeight); }
}

TEST_F(PlanarFunctionsTest, I422ToI420) {
  uint8 *y_pointer = NULL, *u_pointer = NULL, *v_pointer = NULL;
  int y_pitch = kWidth;
  int u_pitch = (kWidth + 1) >> 1;
  int v_pitch = (kWidth + 1) >> 1;
  int y_size = kHeight * kWidth;
  int uv_size = ((kHeight + 1) >> 1) * ((kWidth + 1) >> 1);
  int block_size = 2;
  // Generate a fake input image.
  rtc::scoped_ptr<uint8[]> yuv_input(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv422,
                                y_pointer, u_pointer, v_pointer));
  // Allocate space for the output image.
  rtc::scoped_ptr<uint8[]> yuv_output(
      new uint8[I420_SIZE(kHeight, kWidth) + kAlignment]);
  uint8 *y_output_pointer = ALIGNP(yuv_output.get(), kAlignment);
  uint8 *u_output_pointer = y_output_pointer + y_size;
  uint8 *v_output_pointer = u_output_pointer + uv_size;
  // Generate the expected output.
  uint8 *y_expected_pointer = NULL, *u_expected_pointer = NULL,
      *v_expected_pointer = NULL;
  rtc::scoped_ptr<uint8[]> yuv_output_expected(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv420, y_expected_pointer,
                                u_expected_pointer, v_expected_pointer));

  for (int i = 0; i < repeat_; ++i) {
    libyuv::I422ToI420(y_pointer, y_pitch,
                       u_pointer, u_pitch,
                       v_pointer, v_pitch,
                       y_output_pointer, y_pitch,
                       u_output_pointer, u_pitch,
                       v_output_pointer, v_pitch,
                       kWidth, kHeight);
  }

  // Compare the output frame with what is expected; expect exactly the same.
  // Note: MSE should be set to a larger threshold if an odd block width
  // is used, since the conversion will be lossy.
  EXPECT_TRUE(IsMemoryEqual(y_output_pointer, y_expected_pointer,
                            I420_SIZE(kHeight, kWidth), 1.e-6));

  if (dump_) { DumpYuvImage(y_output_pointer, kWidth, kHeight); }
}

TEST_P(PlanarFunctionsTest, Q420ToI420) {
  // Get the unalignment offset.
  int unalignment = GetParam();
  uint8 *y_pointer = NULL, *yuy2_pointer = NULL;
  int y_pitch = kWidth;
  int yuy2_pitch = 2 * ((kWidth + 1) & ~1);
  int u_pitch = (kWidth + 1) >> 1;
  int v_pitch = (kWidth + 1) >> 1;
  int y_size = kHeight * kWidth;
  int uv_size = ((kHeight + 1) >> 1) * ((kWidth + 1) >> 1);
  int block_size = 2;
  // Generate a fake input image.
  rtc::scoped_ptr<uint8[]> yuv_input(
      CreateFakeQ420TestingImage(kHeight, kWidth, block_size,
                                 y_pointer, yuy2_pointer));
  // Allocate space for the output image.
  rtc::scoped_ptr<uint8[]> yuv_output(
      new uint8[I420_SIZE(kHeight, kWidth) + kAlignment + unalignment]);
  uint8 *y_output_pointer = ALIGNP(yuv_output.get(), kAlignment) +
      unalignment;
  uint8 *u_output_pointer = y_output_pointer + y_size;
  uint8 *v_output_pointer = u_output_pointer + uv_size;
  // Generate the expected output.
  uint8 *y_expected_pointer = NULL, *u_expected_pointer = NULL,
      *v_expected_pointer = NULL;
  rtc::scoped_ptr<uint8[]> yuv_output_expected(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv420, y_expected_pointer,
                                u_expected_pointer, v_expected_pointer));

  for (int i = 0; i < repeat_; ++i) {
    libyuv::Q420ToI420(y_pointer, y_pitch,
                       yuy2_pointer, yuy2_pitch,
                       y_output_pointer, y_pitch,
                       u_output_pointer, u_pitch,
                       v_output_pointer, v_pitch,
                       kWidth, kHeight);
  }
  // Compare the output frame with what is expected; expect exactly the same.
  // Note: MSE should be set to a larger threshold if an odd block width
  // is used, since the conversion will be lossy.
  EXPECT_TRUE(IsMemoryEqual(y_output_pointer, y_expected_pointer,
                            I420_SIZE(kHeight, kWidth), 1.e-6));

  if (dump_) { DumpYuvImage(y_output_pointer, kWidth, kHeight); }
}

TEST_P(PlanarFunctionsTest, M420ToI420) {
  // Get the unalignment offset.
  int unalignment = GetParam();
  uint8 *m420_pointer = NULL;
  int y_pitch = kWidth;
  int m420_pitch = kWidth;
  int u_pitch = (kWidth + 1) >> 1;
  int v_pitch = (kWidth + 1) >> 1;
  int y_size = kHeight * kWidth;
  int uv_size = ((kHeight + 1) >> 1) * ((kWidth + 1) >> 1);
  int block_size = 2;
  // Generate a fake input image.
  rtc::scoped_ptr<uint8[]> yuv_input(
      CreateFakeM420TestingImage(kHeight, kWidth, block_size, m420_pointer));
  // Allocate space for the output image.
  rtc::scoped_ptr<uint8[]> yuv_output(
      new uint8[I420_SIZE(kHeight, kWidth) + kAlignment + unalignment]);
  uint8 *y_output_pointer = ALIGNP(yuv_output.get(), kAlignment) + unalignment;
  uint8 *u_output_pointer = y_output_pointer + y_size;
  uint8 *v_output_pointer = u_output_pointer + uv_size;
  // Generate the expected output.
  uint8 *y_expected_pointer = NULL, *u_expected_pointer = NULL,
      *v_expected_pointer = NULL;
  rtc::scoped_ptr<uint8[]> yuv_output_expected(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv420, y_expected_pointer,
                                u_expected_pointer, v_expected_pointer));

  for (int i = 0; i < repeat_; ++i) {
    libyuv::M420ToI420(m420_pointer, m420_pitch,
                       y_output_pointer, y_pitch,
                       u_output_pointer, u_pitch,
                       v_output_pointer, v_pitch,
                       kWidth, kHeight);
  }
  // Compare the output frame with what is expected; expect exactly the same.
  // Note: MSE should be set to a larger threshold if an odd block width
  // is used, since the conversion will be lossy.
  EXPECT_TRUE(IsMemoryEqual(y_output_pointer, y_expected_pointer,
                            I420_SIZE(kHeight, kWidth), 1.e-6));

  if (dump_) { DumpYuvImage(y_output_pointer, kWidth, kHeight); }
}

TEST_P(PlanarFunctionsTest, NV12ToI420) {
  // Get the unalignment offset.
  int unalignment = GetParam();
  uint8 *y_pointer = NULL, *uv_pointer = NULL;
  int y_pitch = kWidth;
  int uv_pitch = 2 * ((kWidth + 1) >> 1);
  int u_pitch = (kWidth + 1) >> 1;
  int v_pitch = (kWidth + 1) >> 1;
  int y_size = kHeight * kWidth;
  int uv_size = ((kHeight + 1) >> 1) * ((kWidth + 1) >> 1);
  int block_size = 2;
  // Generate a fake input image.
  rtc::scoped_ptr<uint8[]> yuv_input(
      CreateFakeNV12TestingImage(kHeight, kWidth, block_size,
                                 y_pointer, uv_pointer));
  // Allocate space for the output image.
  rtc::scoped_ptr<uint8[]> yuv_output(
      new uint8[I420_SIZE(kHeight, kWidth) + kAlignment + unalignment]);
  uint8 *y_output_pointer = ALIGNP(yuv_output.get(), kAlignment) + unalignment;
  uint8 *u_output_pointer = y_output_pointer + y_size;
  uint8 *v_output_pointer = u_output_pointer + uv_size;
  // Generate the expected output.
  uint8 *y_expected_pointer = NULL, *u_expected_pointer = NULL,
      *v_expected_pointer = NULL;
  rtc::scoped_ptr<uint8[]> yuv_output_expected(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv420, y_expected_pointer,
                                u_expected_pointer, v_expected_pointer));

  for (int i = 0; i < repeat_; ++i) {
    libyuv::NV12ToI420(y_pointer, y_pitch,
                       uv_pointer, uv_pitch,
                       y_output_pointer, y_pitch,
                       u_output_pointer, u_pitch,
                       v_output_pointer, v_pitch,
                       kWidth, kHeight);
  }
  // Compare the output frame with what is expected; expect exactly the same.
  // Note: MSE should be set to a larger threshold if an odd block width
  // is used, since the conversion will be lossy.
  EXPECT_TRUE(IsMemoryEqual(y_output_pointer, y_expected_pointer,
                            I420_SIZE(kHeight, kWidth), 1.e-6));

  if (dump_) { DumpYuvImage(y_output_pointer, kWidth, kHeight); }
}

// A common macro for testing conversion from YUY2/UYVY to I420.
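// SRC_NAME is the FOURCC suffix of the packed source format (YUY2 or UYVY),
// MSE is the threshold passed to IsMemoryEqual, and BLOCK_SIZE is the chroma
// block size of the generated test pattern.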
#define TEST_YUVTOI420(SRC_NAME, MSE, BLOCK_SIZE) \
TEST_P(PlanarFunctionsTest, SRC_NAME##ToI420) { \
  /* Get the unalignment offset.*/ \
  int unalignment = GetParam(); \
  uint8 *yuv_pointer = NULL; \
  int yuv_pitch = 2 * ((kWidth + 1) & ~1); \
  int y_pitch = kWidth; \
  int u_pitch = (kWidth + 1) >> 1; \
  int v_pitch = (kWidth + 1) >> 1; \
  int y_size = kHeight * kWidth; \
  int uv_size = ((kHeight + 1) >> 1) * ((kWidth + 1) >> 1); \
  int block_size = BLOCK_SIZE; \
  /* Generate a fake input image.*/ \
  rtc::scoped_ptr<uint8[]> yuv_input( \
      CreateFakeInterleaveYuvTestingImage(kHeight, kWidth, BLOCK_SIZE, \
                                          yuv_pointer, FOURCC_##SRC_NAME)); \
  /* Allocate space for the output image.*/ \
  rtc::scoped_ptr<uint8[]> yuv_output( \
      new uint8[I420_SIZE(kHeight, kWidth) + kAlignment + unalignment]); \
  uint8 *y_output_pointer = ALIGNP(yuv_output.get(), kAlignment) + \
      unalignment; \
  uint8 *u_output_pointer = y_output_pointer + y_size; \
  uint8 *v_output_pointer = u_output_pointer + uv_size; \
  /* Generate the expected output.*/ \
  uint8 *y_expected_pointer = NULL, *u_expected_pointer = NULL, \
      *v_expected_pointer = NULL; \
  rtc::scoped_ptr<uint8[]> yuv_output_expected( \
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size, \
                                libyuv::kJpegYuv420, y_expected_pointer, \
                                u_expected_pointer, v_expected_pointer)); \
  for (int i = 0; i < repeat_; ++i) { \
    libyuv::SRC_NAME##ToI420(yuv_pointer, yuv_pitch, \
                             y_output_pointer, y_pitch, \
                             u_output_pointer, u_pitch, \
                             v_output_pointer, v_pitch, \
                             kWidth, kHeight); \
  } \
  /* Compare the output frame with what is expected.*/ \
  /* Note: MSE should be set to a larger threshold if an odd block width*/ \
  /* is used, since the conversion will be lossy.*/ \
  EXPECT_TRUE(IsMemoryEqual(y_output_pointer, y_expected_pointer, \
                            I420_SIZE(kHeight, kWidth), MSE)); \
  if (dump_) { DumpYuvImage(y_output_pointer, kWidth, kHeight); } \
}

// TEST_P(PlanarFunctionsTest, YUY2ToI420)
TEST_YUVTOI420(YUY2, 1.e-6, 2);
// TEST_P(PlanarFunctionsTest, UYVYToI420)
TEST_YUVTOI420(UYVY, 1.e-6, 2);

// A common macro for testing conversion from planar YUV (I420/I422/I444)
// to ARGB, ABGR, and BGRA.
#define TEST_YUVTORGB(SRC_NAME, DST_NAME, JPG_TYPE, MSE, BLOCK_SIZE) \
TEST_F(PlanarFunctionsTest, SRC_NAME##To##DST_NAME) { \
  uint8 *y_pointer = NULL, *u_pointer = NULL, *v_pointer = NULL; \
  uint8 *argb_expected_pointer = NULL; \
  int y_pitch = kWidth; \
  int u_pitch = (kWidth + 1) >> 1; \
  int v_pitch = (kWidth + 1) >> 1; \
  /* Generate a fake input image.*/ \
  rtc::scoped_ptr<uint8[]> yuv_input( \
      CreateFakeYuvTestingImage(kHeight, kWidth, BLOCK_SIZE, JPG_TYPE, \
                                y_pointer, u_pointer, v_pointer)); \
  /* Generate the expected output.*/ \
  rtc::scoped_ptr<uint8[]> argb_expected( \
      CreateFakeArgbTestingImage(kHeight, kWidth, BLOCK_SIZE, \
                                 argb_expected_pointer, FOURCC_##DST_NAME)); \
  /* Allocate space for the output.*/ \
  rtc::scoped_ptr<uint8[]> argb_output( \
      new uint8[kHeight * kWidth * 4 + kAlignment]); \
  uint8 *argb_pointer = ALIGNP(argb_output.get(), kAlignment); \
  for (int i = 0; i < repeat_; ++i) { \
    libyuv::SRC_NAME##To##DST_NAME(y_pointer, y_pitch, \
                                   u_pointer, u_pitch, \
                                   v_pointer, v_pitch, \
                                   argb_pointer, \
                                   kWidth * 4, \
                                   kWidth, kHeight); \
  } \
  EXPECT_TRUE(IsMemoryEqual(argb_expected_pointer, argb_pointer, \
                            kHeight * kWidth * 4, MSE)); \
  if (dump_) { DumpArgbImage(argb_pointer, kWidth, kHeight); } \
}

// TEST_F(PlanarFunctionsTest, I420ToARGB)
TEST_YUVTORGB(I420, ARGB, libyuv::kJpegYuv420, 3., 2);
// TEST_F(PlanarFunctionsTest, I420ToABGR)
TEST_YUVTORGB(I420, ABGR, libyuv::kJpegYuv420, 3., 2);
// TEST_F(PlanarFunctionsTest, I420ToBGRA)
TEST_YUVTORGB(I420, BGRA, libyuv::kJpegYuv420, 3., 2);
// TEST_F(PlanarFunctionsTest, I422ToARGB)
TEST_YUVTORGB(I422, ARGB, libyuv::kJpegYuv422, 3., 2);
// TEST_F(PlanarFunctionsTest, I444ToARGB)
TEST_YUVTORGB(I444, ARGB, libyuv::kJpegYuv444, 3., 3);
// Note: an empirical MSE tolerance of 3.0 is used here to allow for the
// rounding error of the float-to-uint8 conversion.

TEST_F(PlanarFunctionsTest, I400ToARGB_Reference) {
  uint8 *y_pointer = NULL, *u_pointer = NULL, *v_pointer = NULL;
  int y_pitch = kWidth;
  int u_pitch = (kWidth + 1) >> 1;
  int v_pitch = (kWidth + 1) >> 1;
  int block_size = 3;
  // Generate a fake input image.
  rtc::scoped_ptr<uint8[]> yuv_input(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv420,
                                y_pointer, u_pointer, v_pointer));
  // As the comparison standard, we convert a grayscale image (with both U and
  // V channels set to 128) using an I420 converter.
  int uv_size = ((kHeight + 1) >> 1) * ((kWidth + 1) >> 1);

  rtc::scoped_ptr<uint8[]> uv(new uint8[uv_size + kAlignment]);
  u_pointer = v_pointer = ALIGNP(uv.get(), kAlignment);
  memset(u_pointer, 128, uv_size);

  // Allocate space for the output image and generate the expected output.
  rtc::scoped_ptr<uint8[]> argb_expected(
      new uint8[kHeight * kWidth * 4 + kAlignment]);
  rtc::scoped_ptr<uint8[]> argb_output(
      new uint8[kHeight * kWidth * 4 + kAlignment]);
  uint8 *argb_expected_pointer = ALIGNP(argb_expected.get(), kAlignment);
  uint8 *argb_pointer = ALIGNP(argb_output.get(), kAlignment);

  libyuv::I420ToARGB(y_pointer, y_pitch,
                     u_pointer, u_pitch,
                     v_pointer, v_pitch,
                     argb_expected_pointer, kWidth * 4,
                     kWidth, kHeight);
  for (int i = 0; i < repeat_; ++i) {
    libyuv::I400ToARGB_Reference(y_pointer, y_pitch,
                                 argb_pointer, kWidth * 4,
                                 kWidth, kHeight);
  }

  // Note: I420ToARGB and I400ToARGB_Reference should produce identical
  // results.
  EXPECT_TRUE(IsMemoryEqual(argb_expected_pointer, argb_pointer,
                            kHeight * kWidth * 4, 2.));
  if (dump_) { DumpArgbImage(argb_pointer, kWidth, kHeight); }
}

TEST_P(PlanarFunctionsTest, I400ToARGB) {
  // Get the unalignment offset.
  int unalignment = GetParam();
  uint8 *y_pointer = NULL, *u_pointer = NULL, *v_pointer = NULL;
  int y_pitch = kWidth;
  int u_pitch = (kWidth + 1) >> 1;
  int v_pitch = (kWidth + 1) >> 1;
  int block_size = 3;
  // Generate a fake input image.
  rtc::scoped_ptr<uint8[]> yuv_input(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv420,
                                y_pointer, u_pointer, v_pointer));
  // As the comparison standard, we convert a grayscale image (with both U and
  // V channels set to 128) using an I420 converter.
  int uv_size = ((kHeight + 1) >> 1) * ((kWidth + 1) >> 1);

  rtc::scoped_ptr<uint8[]> uv(new uint8[uv_size * 2 + kAlignment]);
  u_pointer = ALIGNP(uv.get(), kAlignment);
  v_pointer = u_pointer + uv_size;
  memset(u_pointer, 128, uv_size);
  memset(v_pointer, 128, uv_size);

  // Allocate space for the output image and generate the expected output.
  rtc::scoped_ptr<uint8[]> argb_expected(
      new uint8[kHeight * kWidth * 4 + kAlignment]);
  // 1 byte extra if in the misaligned mode.
  rtc::scoped_ptr<uint8[]> argb_output(
      new uint8[kHeight * kWidth * 4 + kAlignment + unalignment]);
  uint8 *argb_expected_pointer = ALIGNP(argb_expected.get(), kAlignment);
  uint8 *argb_pointer = ALIGNP(argb_output.get(), kAlignment) + unalignment;

  libyuv::I420ToARGB(y_pointer, y_pitch,
                     u_pointer, u_pitch,
                     v_pointer, v_pitch,
                     argb_expected_pointer, kWidth * 4,
                     kWidth, kHeight);
  for (int i = 0; i < repeat_; ++i) {
    libyuv::I400ToARGB(y_pointer, y_pitch,
                       argb_pointer, kWidth * 4,
                       kWidth, kHeight);
  }

  // Note: current I400ToARGB uses an approximate method,
  // so the error tolerance is larger here.
  EXPECT_TRUE(IsMemoryEqual(argb_expected_pointer, argb_pointer,
                            kHeight * kWidth * 4, 64.0));
  if (dump_) { DumpArgbImage(argb_pointer, kWidth, kHeight); }
}

TEST_P(PlanarFunctionsTest, ARGBToI400) {
  // Get the unalignment offset.
  int unalignment = GetParam();
  // Create a fake ARGB input image.
  uint8 *y_pointer = NULL, *u_pointer = NULL, *v_pointer = NULL;
  uint8 *argb_pointer = NULL;
  int block_size = 3;
  // Generate a fake input image.
  rtc::scoped_ptr<uint8[]> argb_input(
      CreateFakeArgbTestingImage(kHeight, kWidth, block_size,
                                 argb_pointer, FOURCC_ARGB));
  // Generate the expected output. Only the Y channel is used.
  rtc::scoped_ptr<uint8[]> yuv_expected(
      CreateFakeYuvTestingImage(kHeight, kWidth, block_size,
                                libyuv::kJpegYuv420,
                                y_pointer, u_pointer, v_pointer));
  // Allocate space for the Y output.
  rtc::scoped_ptr<uint8[]> y_output(
      new uint8[kHeight * kWidth + kAlignment + unalignment]);
  uint8 *y_output_pointer = ALIGNP(y_output.get(), kAlignment) + unalignment;

  for (int i = 0; i < repeat_; ++i) {
    libyuv::ARGBToI400(argb_pointer, kWidth * 4, y_output_pointer, kWidth,
                       kWidth, kHeight);
  }
  // Check if the output matches the input Y channel.
  // Note: an empirical MSE tolerance of 2.0 is used here to allow for the
  // rounding error of the float-to-uint8 conversion.
  EXPECT_TRUE(IsMemoryEqual(y_output_pointer, y_pointer,
                            kHeight * kWidth, 2.));
  if (dump_) { DumpArgbImage(argb_pointer, kWidth, kHeight); }
}

// A common macro for testing converting RAW, BG24, BGRA, and ABGR
// to ARGB.
#define TEST_ARGB(SRC_NAME, FC_ID, BPP, BLOCK_SIZE) \
TEST_P(PlanarFunctionsTest, SRC_NAME##ToARGB) { \
  int unalignment = GetParam();  /* Get the unalignment offset.*/ \
  uint8 *argb_expected_pointer = NULL, *src_pointer = NULL; \
  /* Generate a fake input image.*/ \
  rtc::scoped_ptr<uint8[]> src_input( \
      CreateFakeArgbTestingImage(kHeight, kWidth, BLOCK_SIZE, \
                                 src_pointer, FOURCC_##FC_ID)); \
  /* Generate the expected output.*/ \
  rtc::scoped_ptr<uint8[]> argb_expected( \
      CreateFakeArgbTestingImage(kHeight, kWidth, BLOCK_SIZE, \
                                 argb_expected_pointer, FOURCC_ARGB)); \
  /* Allocate space for the output; 1 byte extra if in the unaligned mode.*/ \
  rtc::scoped_ptr<uint8[]> argb_output( \
      new uint8[kHeight * kWidth * 4 + kAlignment + unalignment]); \
  uint8 *argb_pointer = ALIGNP(argb_output.get(), kAlignment) + unalignment; \
  for (int i = 0; i < repeat_; ++i) { \
    libyuv::SRC_NAME##ToARGB(src_pointer, kWidth * (BPP), argb_pointer, \
                             kWidth * 4, kWidth, kHeight); \
  } \
  /* Compare the result; expect identical.*/ \
  EXPECT_TRUE(IsMemoryEqual(argb_expected_pointer, argb_pointer, \
                            kHeight * kWidth * 4, 1.e-6)); \
  if (dump_) { DumpArgbImage(argb_pointer, kWidth, kHeight); } \
}

TEST_ARGB(RAW, RAW, 3, 3);    // TEST_P(PlanarFunctionsTest, RAWToARGB)
TEST_ARGB(BG24, 24BG, 3, 3);  // TEST_P(PlanarFunctionsTest, BG24ToARGB)
TEST_ARGB(ABGR, ABGR, 4, 3);  // TEST_P(PlanarFunctionsTest, ABGRToARGB)
TEST_ARGB(BGRA, BGRA, 4, 3);  // TEST_P(PlanarFunctionsTest, BGRAToARGB)

// Parameter Test: The parameter is the unalignment offset.
// Aligned data for testing assembly versions.
INSTANTIATE_TEST_CASE_P(PlanarFunctionsAligned, PlanarFunctionsTest,
                        ::testing::Values(0));

// Purposely unalign the output argb pointer to test slow path (C version).
INSTANTIATE_TEST_CASE_P(PlanarFunctionsMisaligned, PlanarFunctionsTest,
                        ::testing::Values(1));

}  // namespace cricket