Delete method webrtc::VideoFrame::native_handle.
Instead, use the corresponding method on VideoFrameBuffer. In the process,
reduce code duplication in frame comparison functions used in
the test code.
Make FramesEqual use FrameBufsEqual. Make the latter support texture frames.
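For illustration (not part of the diff below), a minimal sketch of how a call site
migrates off the removed accessor and how the shared test helpers are used. The
include paths and the helper name SameTextureAndContent are assumptions made for
the example, not code from this CL:

  #include "webrtc/test/frame_utils.h"
  #include "webrtc/video_frame.h"

  // Hypothetical helper: compares two frames, texture-backed or I420.
  bool SameTextureAndContent(const webrtc::VideoFrame& a,
                             const webrtc::VideoFrame& b) {
    // Before this CL: a.native_handle(). The handle now lives on the buffer.
    void* handle_a = a.video_frame_buffer()->native_handle();
    void* handle_b = b.video_frame_buffer()->native_handle();
    if (handle_a || handle_b) {
      // Texture frames compare by handle identity, as FrameBufsEqual now does.
      return handle_a == handle_b;
    }
    // I420 frames: FramesEqual checks metadata and defers to FrameBufsEqual.
    return webrtc::test::FramesEqual(a, b);
  }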
The CL also refactors VideoFrame::CopyFrame to use I420Buffer::Copy. This has
the possibly undesired side effects of never reusing the destination frame's
existing buffer, and of producing a frame buffer whose strides may differ from
the source frame's.
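A rough sketch of the refactored copy path, assuming I420Buffer::Copy takes the
source buffer and always allocates a fresh I420 buffer; member and helper names
are illustrative and may differ from the actual CL:

  void VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
    ShallowCopy(videoFrame);  // Copy metadata and share the buffer refptr.
    // Deep-copy pixel data for non-texture frames. Because Copy() always
    // allocates a new buffer, the destination's previous buffer is never
    // reused and the resulting strides may differ from the source's.
    if (video_frame_buffer_ && !video_frame_buffer_->native_handle())
      video_frame_buffer_ = I420Buffer::Copy(video_frame_buffer_);
  }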
BUG=webrtc:5682
Review URL: https://codereview.webrtc.org/1881953002
Cr-Commit-Position: refs/heads/master@{#12373}
diff --git a/webrtc/test/frame_utils.cc b/webrtc/test/frame_utils.cc
index 0f41144..21daa44 100644
--- a/webrtc/test/frame_utils.cc
+++ b/webrtc/test/frame_utils.cc
@@ -16,53 +16,60 @@
bool EqualPlane(const uint8_t* data1,
const uint8_t* data2,
- int stride,
+ int stride1,
+ int stride2,
int width,
int height) {
for (int y = 0; y < height; ++y) {
if (memcmp(data1, data2, width) != 0)
return false;
- data1 += stride;
- data2 += stride;
+ data1 += stride1;
+ data2 += stride2;
}
return true;
}
+
bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) {
- if (f1.width() != f2.width() || f1.height() != f2.height() ||
- f1.stride(webrtc::kYPlane) != f2.stride(webrtc::kYPlane) ||
- f1.stride(webrtc::kUPlane) != f2.stride(webrtc::kUPlane) ||
- f1.stride(webrtc::kVPlane) != f2.stride(webrtc::kVPlane) ||
- f1.timestamp() != f2.timestamp() ||
+ if (f1.timestamp() != f2.timestamp() ||
f1.ntp_time_ms() != f2.ntp_time_ms() ||
f1.render_time_ms() != f2.render_time_ms()) {
return false;
}
- const int half_width = (f1.width() + 1) / 2;
- const int half_height = (f1.height() + 1) / 2;
- return EqualPlane(f1.buffer(webrtc::kYPlane), f2.buffer(webrtc::kYPlane),
- f1.stride(webrtc::kYPlane), f1.width(), f1.height()) &&
- EqualPlane(f1.buffer(webrtc::kUPlane), f2.buffer(webrtc::kUPlane),
- f1.stride(webrtc::kUPlane), half_width, half_height) &&
- EqualPlane(f1.buffer(webrtc::kVPlane), f2.buffer(webrtc::kVPlane),
- f1.stride(webrtc::kVPlane), half_width, half_height);
+ return FrameBufsEqual(f1.video_frame_buffer(), f2.video_frame_buffer());
}
bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f2) {
- if (f1->width() != f2->width() || f1->height() != f2->height() ||
- f1->stride(webrtc::kYPlane) != f2->stride(webrtc::kYPlane) ||
- f1->stride(webrtc::kUPlane) != f2->stride(webrtc::kUPlane) ||
- f1->stride(webrtc::kVPlane) != f2->stride(webrtc::kVPlane)) {
+ if (f1 == f2) {
+ return true;
+ }
+ // Exclude nullptr (except when both are nullptr, handled above).
+ if (!f1 || !f2) {
+ return false;
+ }
+
+ if (f1->width() != f2->width() || f1->height() != f2->height()) {
+ return false;
+ }
+ // Texture frames are compared by native handle identity.
+ if (f1->native_handle()) {
+ return f1->native_handle() == f2->native_handle();
+ }
+
+ if (f2->native_handle()) {
return false;
}
const int half_width = (f1->width() + 1) / 2;
const int half_height = (f1->height() + 1) / 2;
return EqualPlane(f1->data(webrtc::kYPlane), f2->data(webrtc::kYPlane),
- f1->stride(webrtc::kYPlane), f1->width(), f1->height()) &&
+ f1->stride(webrtc::kYPlane), f2->stride(webrtc::kYPlane),
+ f1->width(), f1->height()) &&
EqualPlane(f1->data(webrtc::kUPlane), f2->data(webrtc::kUPlane),
- f1->stride(webrtc::kUPlane), half_width, half_height) &&
+ f1->stride(webrtc::kUPlane), f2->stride(webrtc::kUPlane),
+ half_width, half_height) &&
EqualPlane(f1->data(webrtc::kVPlane), f2->data(webrtc::kVPlane),
- f1->stride(webrtc::kVPlane), half_width, half_height);
+ f1->stride(webrtc::kVPlane), f2->stride(webrtc::kVPlane),
+ half_width, half_height);
}
} // namespace test