/*
 *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "api/video/video_frame.h"

#include <math.h>
#include <string.h>

#include <algorithm>
#include <utility>
#include <vector>

#include "api/video/i010_buffer.h"
#include "api/video/i420_buffer.h"
#include "api/video/i422_buffer.h"
#include "api/video/i444_buffer.h"
#include "api/video/nv12_buffer.h"
#include "rtc_base/time_utils.h"
#include "test/fake_texture_frame.h"
#include "test/frame_utils.h"
#include "test/gtest.h"

namespace webrtc {

namespace {

struct SubSampling {
  int x;
  int y;
};

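// Returns the horizontal (x) and vertical (y) chroma subsampling factors for
// the given buffer type: 4:2:0 formats (I420, I420A, I010) subsample chroma
// by 2 in both dimensions, I422 only horizontally, and I444 not at all.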
SubSampling SubSamplingForType(VideoFrameBuffer::Type type) {
  switch (type) {
    case VideoFrameBuffer::Type::kI420:
      return {.x = 2, .y = 2};
    case VideoFrameBuffer::Type::kI420A:
      return {.x = 2, .y = 2};
    case VideoFrameBuffer::Type::kI422:
      return {.x = 2, .y = 1};
    case VideoFrameBuffer::Type::kI444:
      return {.x = 1, .y = 1};
    case VideoFrameBuffer::Type::kI010:
      return {.x = 2, .y = 2};
    default:
      return {};
  }
}

// Helper function to create a buffer and fill it with a gradient for
// PlanarYuvBuffer based buffers.
template <class T>
rtc::scoped_refptr<T> CreateGradient(int width, int height) {
  rtc::scoped_refptr<T> buffer(T::Create(width, height));
  // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h
  for (int x = 0; x < width; x++) {
    for (int y = 0; y < height; y++) {
      buffer->MutableDataY()[x + y * width] =
          128 * (x * height + y * width) / (width * height);
    }
  }
  int chroma_width = buffer->ChromaWidth();
  int chroma_height = buffer->ChromaHeight();
  for (int x = 0; x < chroma_width; x++) {
    for (int y = 0; y < chroma_height; y++) {
      buffer->MutableDataU()[x + y * chroma_width] =
          255 * x / (chroma_width - 1);
      buffer->MutableDataV()[x + y * chroma_width] =
          255 * y / (chroma_height - 1);
    }
  }
  return buffer;
}

// Helper function to create a buffer and fill it with a gradient.
rtc::scoped_refptr<NV12BufferInterface> CreateNV12Gradient(int width,
                                                           int height) {
  rtc::scoped_refptr<NV12Buffer> buffer(NV12Buffer::Create(width, height));
  // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h
  for (int x = 0; x < width; x++) {
    for (int y = 0; y < height; y++) {
      buffer->MutableDataY()[x + y * width] =
          128 * (x * height + y * width) / (width * height);
    }
  }
  int chroma_width = buffer->ChromaWidth();
  int chroma_height = buffer->ChromaHeight();
  for (int x = 0; x < chroma_width; x++) {
    for (int y = 0; y < chroma_height; y++) {
      buffer->MutableDataUV()[x * 2 + y * buffer->StrideUV()] =
          255 * x / (chroma_width - 1);
      buffer->MutableDataUV()[x * 2 + 1 + y * buffer->StrideUV()] =
          255 * y / (chroma_height - 1);
    }
  }
  return buffer;
}

// The offsets and sizes describe the rectangle extracted from the
// original (gradient) frame, in relative coordinates where the
// original frame corresponds to the unit square, 0.0 <= x, y < 1.0.
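// The corner pixel values are checked against the gradient written by
// CreateGradient: Y should be close to 256 * (orig_x + orig_y) / 2, U close
// to 256 * orig_x, and V close to 256 * orig_y.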
template <class T>
void CheckCrop(const T& frame,
               double offset_x,
               double offset_y,
               double rel_width,
               double rel_height) {
  int width = frame.width();
  int height = frame.height();

  SubSampling plane_divider = SubSamplingForType(frame.type());

  // Check that pixel values in the corners match the gradient used
  // for initialization.
  for (int i = 0; i < 2; i++) {
    for (int j = 0; j < 2; j++) {
      // Pixel coordinates of the corner.
      int x = i * (width - 1);
      int y = j * (height - 1);
      // Relative coordinates, range 0.0 - 1.0 correspond to the
      // size of the uncropped input frame.
      double orig_x = offset_x + i * rel_width;
      double orig_y = offset_y + j * rel_height;

      EXPECT_NEAR(frame.DataY()[x + y * frame.StrideY()] / 256.0,
                  (orig_x + orig_y) / 2, 0.02);
      EXPECT_NEAR(frame.DataU()[x / plane_divider.x +
                                (y / plane_divider.y) * frame.StrideU()] /
                      256.0,
                  orig_x, 0.02);
      EXPECT_NEAR(frame.DataV()[x / plane_divider.x +
                                (y / plane_divider.y) * frame.StrideV()] /
                      256.0,
                  orig_y, 0.02);
    }
  }
}

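// Checks that `rotated` is the gradient frame produced by CreateGradient for
// the original `width` and `height`, rotated clockwise by `rotation`, by
// sampling the Y, U and V values at the four corners.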
template <class T>
void CheckRotate(int width,
                 int height,
                 webrtc::VideoRotation rotation,
                 const T& rotated) {
  int rotated_width = width;
  int rotated_height = height;

  if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }
  EXPECT_EQ(rotated_width, rotated.width());
  EXPECT_EQ(rotated_height, rotated.height());

  // Clock-wise order (with 0,0 at top-left)
  const struct {
    int x;
    int y;
  } corners[] = {{0, 0}, {1, 0}, {1, 1}, {0, 1}};
  // Corresponding corner colors of the frame produced by CreateGradient.
  const struct {
    int y;
    int u;
    int v;
  } colors[] = {{0, 0, 0}, {127, 255, 0}, {255, 255, 255}, {127, 0, 255}};
  int corner_offset = static_cast<int>(rotation) / 90;

  SubSampling plane_divider = SubSamplingForType(rotated.type());

  for (int i = 0; i < 4; i++) {
    int j = (i + corner_offset) % 4;
    int x = corners[j].x * (rotated_width - 1);
    int y = corners[j].y * (rotated_height - 1);
    EXPECT_EQ(colors[i].y, rotated.DataY()[x + y * rotated.StrideY()]);
    EXPECT_EQ(colors[i].u,
              rotated.DataU()[(x / plane_divider.x) +
                              (y / plane_divider.y) * rotated.StrideU()]);
    EXPECT_EQ(colors[i].v,
              rotated.DataV()[(x / plane_divider.x) +
                              (y / plane_divider.y) * rotated.StrideV()]);
  }
}

}  // namespace

TEST(TestVideoFrame, WidthHeightValues) {
  VideoFrame frame =
      VideoFrame::Builder()
          .set_video_frame_buffer(I420Buffer::Create(10, 10, 10, 14, 90))
          .set_rotation(webrtc::kVideoRotation_0)
          .set_timestamp_ms(789)
          .build();
  const int valid_value = 10;
  EXPECT_EQ(valid_value, frame.width());
  EXPECT_EQ(valid_value, frame.height());
  frame.set_timestamp(123u);
  EXPECT_EQ(123u, frame.timestamp());
  frame.set_ntp_time_ms(456);
  EXPECT_EQ(456, frame.ntp_time_ms());
  EXPECT_EQ(789, frame.render_time_ms());
}

TEST(TestVideoFrame, ShallowCopy) {
  uint32_t timestamp = 1;
  int64_t ntp_time_ms = 2;
  int64_t timestamp_us = 3;
  int stride_y = 15;
  int stride_u = 10;
  int stride_v = 10;
  int width = 15;
  int height = 15;

  const int kSizeY = 400;
  const int kSizeU = 100;
  const int kSizeV = 100;
  const VideoRotation kRotation = kVideoRotation_270;
  uint8_t buffer_y[kSizeY];
  uint8_t buffer_u[kSizeU];
  uint8_t buffer_v[kSizeV];
  memset(buffer_y, 16, kSizeY);
  memset(buffer_u, 8, kSizeU);
  memset(buffer_v, 4, kSizeV);

  VideoFrame frame1 = VideoFrame::Builder()
                          .set_video_frame_buffer(I420Buffer::Copy(
                              width, height, buffer_y, stride_y, buffer_u,
                              stride_u, buffer_v, stride_v))
                          .set_rotation(kRotation)
                          .set_timestamp_us(0)
                          .build();
  frame1.set_timestamp(timestamp);
  frame1.set_ntp_time_ms(ntp_time_ms);
  frame1.set_timestamp_us(timestamp_us);
  VideoFrame frame2(frame1);

  EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());
  const webrtc::I420BufferInterface* yuv1 =
      frame1.video_frame_buffer()->GetI420();
  const webrtc::I420BufferInterface* yuv2 =
      frame2.video_frame_buffer()->GetI420();
  EXPECT_EQ(yuv1->DataY(), yuv2->DataY());
  EXPECT_EQ(yuv1->DataU(), yuv2->DataU());
  EXPECT_EQ(yuv1->DataV(), yuv2->DataV());

  EXPECT_EQ(frame2.timestamp(), frame1.timestamp());
  EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms());
  EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us());
  EXPECT_EQ(frame2.rotation(), frame1.rotation());

  frame2.set_timestamp(timestamp + 1);
  frame2.set_ntp_time_ms(ntp_time_ms + 1);
  frame2.set_timestamp_us(timestamp_us + 1);
  frame2.set_rotation(kVideoRotation_90);

  EXPECT_NE(frame2.timestamp(), frame1.timestamp());
  EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms());
  EXPECT_NE(frame2.timestamp_us(), frame1.timestamp_us());
  EXPECT_NE(frame2.rotation(), frame1.rotation());
}

TEST(TestVideoFrame, TextureInitialValues) {
  VideoFrame frame = test::FakeNativeBuffer::CreateFrame(
      640, 480, 100, 10, webrtc::kVideoRotation_0);
  EXPECT_EQ(640, frame.width());
  EXPECT_EQ(480, frame.height());
  EXPECT_EQ(100u, frame.timestamp());
  EXPECT_EQ(10, frame.render_time_ms());
  ASSERT_TRUE(frame.video_frame_buffer() != nullptr);
  EXPECT_TRUE(frame.video_frame_buffer()->type() ==
              VideoFrameBuffer::Type::kNative);

  frame.set_timestamp(200);
  EXPECT_EQ(200u, frame.timestamp());
  frame.set_timestamp_us(20);
  EXPECT_EQ(20, frame.timestamp_us());
}

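// Typed test suite that runs the copy/crop/scale tests below once for every
// planar YUV buffer implementation listed in TestTypesAll.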
template <typename T>
class TestPlanarYuvBuffer : public ::testing::Test {};
TYPED_TEST_SUITE_P(TestPlanarYuvBuffer);

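// Creates a 20x10 buffer of type T and fills the Y, U and V planes with the
// constants 1, 2 and 3; the number of bytes written per chroma plane depends
// on the chroma subsampling of T.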
template <class T>
rtc::scoped_refptr<T> CreateAndFillBuffer() {
  auto buf = T::Create(20, 10);
  memset(buf->MutableDataY(), 1, 200);

  if (buf->type() == VideoFrameBuffer::Type::kI444) {
    memset(buf->MutableDataU(), 2, 200);
    memset(buf->MutableDataV(), 3, 200);
  } else if (buf->type() == VideoFrameBuffer::Type::kI422) {
    memset(buf->MutableDataU(), 2, 100);
    memset(buf->MutableDataV(), 3, 100);
  } else {
    memset(buf->MutableDataU(), 2, 50);
    memset(buf->MutableDataV(), 3, 50);
  }

  return buf;
}

TYPED_TEST_P(TestPlanarYuvBuffer, Copy) {
  rtc::scoped_refptr<TypeParam> buf1 = CreateAndFillBuffer<TypeParam>();
  rtc::scoped_refptr<TypeParam> buf2 = TypeParam::Copy(*buf1);
  EXPECT_TRUE(test::FrameBufsEqual(buf1, buf2));
}

TYPED_TEST_P(TestPlanarYuvBuffer, CropXCenter) {
  rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(200, 100);

  // Pure center cropping, no scaling.
  rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(100, 100);
  scaled_buffer->CropAndScaleFrom(*buf, 50, 0, 100, 100);
  CheckCrop<TypeParam>(*scaled_buffer, 0.25, 0.0, 0.5, 1.0);
}

TYPED_TEST_P(TestPlanarYuvBuffer, CropXNotCenter) {
  rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(200, 100);

  // Non-center cropping, no scaling.
  rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(100, 100);
  scaled_buffer->CropAndScaleFrom(*buf, 25, 0, 100, 100);
  CheckCrop<TypeParam>(*scaled_buffer, 0.125, 0.0, 0.5, 1.0);
}

TYPED_TEST_P(TestPlanarYuvBuffer, CropYCenter) {
  rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(100, 200);

  // Pure center cropping, no scaling.
  rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(100, 100);
  scaled_buffer->CropAndScaleFrom(*buf, 0, 50, 100, 100);
  CheckCrop<TypeParam>(*scaled_buffer, 0.0, 0.25, 1.0, 0.5);
}

TYPED_TEST_P(TestPlanarYuvBuffer, CropYNotCenter) {
  rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(100, 200);

  // Non-center cropping, no scaling.
  rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(100, 100);
  scaled_buffer->CropAndScaleFrom(*buf, 0, 25, 100, 100);
  CheckCrop<TypeParam>(*scaled_buffer, 0.0, 0.125, 1.0, 0.5);
}

TYPED_TEST_P(TestPlanarYuvBuffer, CropAndScale16x9) {
  const int buffer_width = 640;
  const int buffer_height = 480;
  const int crop_width = 320;
  const int crop_height = 180;
  rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(640, 480);

  // Pure center cropping, no scaling.
  const int out_width =
      std::min(buffer_width, crop_width * buffer_height / crop_height);
  const int out_height =
      std::min(buffer_height, crop_height * buffer_width / crop_width);
  rtc::scoped_refptr<TypeParam> scaled_buffer =
      TypeParam::Create(out_width, out_height);
  scaled_buffer->CropAndScaleFrom(*buf, (buffer_width - out_width) / 2,
                                  (buffer_height - out_height) / 2, out_width,
                                  out_height);
  CheckCrop<TypeParam>(*scaled_buffer, 0.0, 0.125, 1.0, 0.75);
}

REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBuffer,
                            Copy,
                            CropXCenter,
                            CropXNotCenter,
                            CropYCenter,
                            CropYNotCenter,
                            CropAndScale16x9);

using TestTypesAll =
    ::testing::Types<I420Buffer, I010Buffer, I444Buffer, I422Buffer>;
INSTANTIATE_TYPED_TEST_SUITE_P(All, TestPlanarYuvBuffer, TestTypesAll);

template <class T>
class TestPlanarYuvBufferScale : public ::testing::Test {};
TYPED_TEST_SUITE_P(TestPlanarYuvBufferScale);

TYPED_TEST_P(TestPlanarYuvBufferScale, Scale) {
  rtc::scoped_refptr<TypeParam> buf = CreateGradient<TypeParam>(200, 100);

  // Pure scaling, no cropping.
  rtc::scoped_refptr<TypeParam> scaled_buffer = TypeParam::Create(150, 75);
  scaled_buffer->ScaleFrom(*buf);
  CheckCrop<TypeParam>(*scaled_buffer, 0.0, 0.0, 1.0, 1.0);
}

REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBufferScale, Scale);

using TestTypesScale = ::testing::Types<I420Buffer, I010Buffer>;
INSTANTIATE_TYPED_TEST_SUITE_P(All, TestPlanarYuvBufferScale, TestTypesScale);

template <class T>
class TestPlanarYuvBufferRotate : public ::testing::Test {
 public:
  std::vector<webrtc::VideoRotation> RotationParams = {
      kVideoRotation_0, kVideoRotation_90, kVideoRotation_180,
      kVideoRotation_270};
};

TYPED_TEST_SUITE_P(TestPlanarYuvBufferRotate);

TYPED_TEST_P(TestPlanarYuvBufferRotate, Rotates) {
  for (const webrtc::VideoRotation& rotation : this->RotationParams) {
    rtc::scoped_refptr<TypeParam> buffer = CreateGradient<TypeParam>(640, 480);
    rtc::scoped_refptr<TypeParam> rotated_buffer =
        TypeParam::Rotate(*buffer, rotation);
    CheckRotate(640, 480, rotation, *rotated_buffer);
  }
}

REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBufferRotate, Rotates);

using TestTypesRotate =
    ::testing::Types<I420Buffer, I010Buffer, I444Buffer, I422Buffer>;
INSTANTIATE_TYPED_TEST_SUITE_P(Rotate,
                               TestPlanarYuvBufferRotate,
                               TestTypesRotate);

TEST(TestNV12Buffer, CropAndScale) {
  const int kSourceWidth = 640;
  const int kSourceHeight = 480;
  const int kScaledWidth = 320;
  const int kScaledHeight = 240;
  const int kCropLeft = 40;
  const int kCropTop = 30;
  const int kCropRight = 0;
  const int kCropBottom = 30;

  rtc::scoped_refptr<VideoFrameBuffer> buf =
      CreateNV12Gradient(kSourceWidth, kSourceHeight);

  rtc::scoped_refptr<VideoFrameBuffer> scaled_buffer = buf->CropAndScale(
      kCropLeft, kCropTop, kSourceWidth - kCropLeft - kCropRight,
      kSourceHeight - kCropTop - kCropBottom, kScaledWidth, kScaledHeight);

  // Parameters to CheckCrop indicate what part of the source frame is in the
  // scaled frame.
  const float kOffsetX = (kCropLeft + 0.0) / kSourceWidth;
  const float kOffsetY = (kCropTop + 0.0) / kSourceHeight;
  const float kRelativeWidth =
      (kSourceWidth - kCropLeft - kCropRight + 0.0) / kSourceWidth;
  const float kRelativeHeight =
      (kSourceHeight - kCropTop - kCropBottom + 0.0) / kSourceHeight;
  CheckCrop(*scaled_buffer->ToI420(), kOffsetX, kOffsetY, kRelativeWidth,
            kRelativeHeight);
}

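// The tests below exercise VideoFrame::UpdateRect, whose fields are
// {offset_x, offset_y, width, height}.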
TEST(TestUpdateRect, CanCompare) {
  VideoFrame::UpdateRect a = {0, 0, 100, 200};
  VideoFrame::UpdateRect b = {0, 0, 100, 200};
  VideoFrame::UpdateRect c = {1, 0, 100, 200};
  VideoFrame::UpdateRect d = {0, 1, 100, 200};
  EXPECT_TRUE(a == b);
  EXPECT_FALSE(a == c);
  EXPECT_FALSE(a == d);
}

TEST(TestUpdateRect, ComputesIsEmpty) {
  VideoFrame::UpdateRect a = {0, 0, 0, 0};
  VideoFrame::UpdateRect b = {0, 0, 100, 200};
  VideoFrame::UpdateRect c = {1, 100, 0, 0};
  VideoFrame::UpdateRect d = {1, 100, 100, 200};
  EXPECT_TRUE(a.IsEmpty());
  EXPECT_FALSE(b.IsEmpty());
  EXPECT_TRUE(c.IsEmpty());
  EXPECT_FALSE(d.IsEmpty());
}

TEST(TestUpdateRectUnion, NonIntersecting) {
  VideoFrame::UpdateRect a = {0, 0, 10, 20};
  VideoFrame::UpdateRect b = {100, 200, 10, 20};
  a.Union(b);
  EXPECT_EQ(a, VideoFrame::UpdateRect({0, 0, 110, 220}));
}

TEST(TestUpdateRectUnion, Intersecting) {
  VideoFrame::UpdateRect a = {0, 0, 10, 10};
  VideoFrame::UpdateRect b = {5, 5, 30, 20};
  a.Union(b);
  EXPECT_EQ(a, VideoFrame::UpdateRect({0, 0, 35, 25}));
}

TEST(TestUpdateRectUnion, OneInsideAnother) {
  VideoFrame::UpdateRect a = {0, 0, 100, 100};
  VideoFrame::UpdateRect b = {5, 5, 30, 20};
  a.Union(b);
  EXPECT_EQ(a, VideoFrame::UpdateRect({0, 0, 100, 100}));
}

TEST(TestUpdateRectIntersect, NonIntersecting) {
  VideoFrame::UpdateRect a = {0, 0, 10, 20};
  VideoFrame::UpdateRect b = {100, 200, 10, 20};
  a.Intersect(b);
  EXPECT_EQ(a, VideoFrame::UpdateRect({0, 0, 0, 0}));
}

TEST(TestUpdateRectIntersect, Intersecting) {
  VideoFrame::UpdateRect a = {0, 0, 10, 10};
  VideoFrame::UpdateRect b = {5, 5, 30, 20};
  a.Intersect(b);
  EXPECT_EQ(a, VideoFrame::UpdateRect({5, 5, 5, 5}));
}

TEST(TestUpdateRectIntersect, OneInsideAnother) {
  VideoFrame::UpdateRect a = {0, 0, 100, 100};
  VideoFrame::UpdateRect b = {5, 5, 30, 20};
  a.Intersect(b);
  EXPECT_EQ(a, VideoFrame::UpdateRect({5, 5, 30, 20}));
}

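// The ScaleWithFrame tests map an update rect through the same crop-and-scale
// that is applied to the frame; the arguments are the original frame size,
// the crop rectangle (x, y, width, height) and the scaled output size.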
TEST(TestUpdateRectScale, NoScale) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 50, 100, 200};
  VideoFrame::UpdateRect scaled =
      a.ScaleWithFrame(width, height, 0, 0, width, height, width, height);
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({100, 50, 100, 200}));
}

TEST(TestUpdateRectScale, CropOnly) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 50, 100, 200};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, 10, 10, width - 20, height - 20, width - 20, height - 20);
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({90, 40, 100, 200}));
}

TEST(TestUpdateRectScale, CropOnlyToOddOffset) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 50, 100, 200};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, 5, 5, width - 10, height - 10, width - 10, height - 10);
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({94, 44, 102, 202}));
}

TEST(TestUpdateRectScale, ScaleByHalf) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 60, 100, 200};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, 0, 0, width, height, width / 2, height / 2);
  // Scaled by half and +2 pixels in all directions.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({48, 28, 54, 104}));
}

TEST(TestUpdateRectScale, CropToUnchangedRegionBelowUpdateRect) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 60, 100, 200};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, (width - 10) / 2, (height - 10) / 2, 10, 10, 10, 10);
  // Update is out of the cropped frame.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({0, 0, 0, 0}));
}

TEST(TestUpdateRectScale, CropToUnchangedRegionAboveUpdateRect) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {600, 400, 10, 10};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, (width - 10) / 2, (height - 10) / 2, 10, 10, 10, 10);
  // Update is out of the cropped frame.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({0, 0, 0, 0}));
}

TEST(TestUpdateRectScale, CropInsideUpdate) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {300, 200, 100, 100};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, (width - 10) / 2, (height - 10) / 2, 10, 10, 10, 10);
  // Cropped frame is inside the update rect.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({0, 0, 10, 10}));
}

TEST(TestUpdateRectScale, CropAndScaleByHalf) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 60, 100, 200};
  VideoFrame::UpdateRect scaled =
      a.ScaleWithFrame(width, height, 10, 10, width - 20, height - 20,
                       (width - 20) / 2, (height - 20) / 2);
  // Scaled by half and +3 pixels in all directions, because of odd offset
  // after crop and scale.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({42, 22, 56, 106}));
}

}  // namespace webrtc