Enable video processing unit tests to take video clips as a parameter.
This change converts the video processing unit tests (including all tests
under the fixture, e.g. the denoiser test) into parameterized tests that
take a set of video clips as a parameter. This is needed if we want to run
offline regression tests on visual quality.
BUG=
Review URL: https://codereview.webrtc.org/1907353004
Cr-Commit-Position: refs/heads/master@{#12485}
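
For reference, adding another clip to the parameterized suite would look
roughly like the sketch below. The clip name "some_clip_qcif" and its
176x144 resolution are hypothetical placeholders, not resources added by
this change; only the foreman_cif instantiation in the diff is real.

    // Hypothetical: register an additional clip for offline
    // visual-quality regression runs (placeholder name/resolution).
    INSTANTIATE_TEST_CASE_P(
        SomeClipQcif,
        VideoProcessingTest,
        ::testing::Values(VideoToTest(
            {webrtc::test::ResourcePath("some_clip_qcif", "yuv"),
             176, 144})));
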
diff --git a/webrtc/modules/video_processing/test/brightness_detection_test.cc b/webrtc/modules/video_processing/test/brightness_detection_test.cc
index abce518..409cf1f 100644
--- a/webrtc/modules/video_processing/test/brightness_detection_test.cc
+++ b/webrtc/modules/video_processing/test/brightness_detection_test.cc
@@ -21,7 +21,7 @@
#else
#define MAYBE_BrightnessDetection BrightnessDetection
#endif
-TEST_F(VideoProcessingTest, MAYBE_BrightnessDetection) {
+TEST_P(VideoProcessingTest, MAYBE_BrightnessDetection) {
uint32_t frameNum = 0;
int32_t brightnessWarning = 0;
uint32_t warningCount = 0;
diff --git a/webrtc/modules/video_processing/test/content_metrics_test.cc b/webrtc/modules/video_processing/test/content_metrics_test.cc
index 80bb564..c5b7285 100644
--- a/webrtc/modules/video_processing/test/content_metrics_test.cc
+++ b/webrtc/modules/video_processing/test/content_metrics_test.cc
@@ -18,9 +18,9 @@
namespace webrtc {
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_ContentAnalysis) {
+TEST_P(VideoProcessingTest, DISABLED_ContentAnalysis) {
#else
-TEST_F(VideoProcessingTest, ContentAnalysis) {
+TEST_P(VideoProcessingTest, ContentAnalysis) {
#endif
VPMContentAnalysis ca__c(false);
VPMContentAnalysis ca__sse(true);
diff --git a/webrtc/modules/video_processing/test/deflickering_test.cc b/webrtc/modules/video_processing/test/deflickering_test.cc
index 5ff5692..ad0c628 100644
--- a/webrtc/modules/video_processing/test/deflickering_test.cc
+++ b/webrtc/modules/video_processing/test/deflickering_test.cc
@@ -22,9 +22,9 @@
namespace webrtc {
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_Deflickering) {
+TEST_P(VideoProcessingTest, DISABLED_Deflickering) {
#else
-TEST_F(VideoProcessingTest, Deflickering) {
+TEST_P(VideoProcessingTest, Deflickering) {
#endif
enum { NumRuns = 30 };
uint32_t frameNum = 0;
diff --git a/webrtc/modules/video_processing/test/denoiser_test.cc b/webrtc/modules/video_processing/test/denoiser_test.cc
index 4c13a05..a59105c 100644
--- a/webrtc/modules/video_processing/test/denoiser_test.cc
+++ b/webrtc/modules/video_processing/test/denoiser_test.cc
@@ -20,7 +20,7 @@
namespace webrtc {
-TEST_F(VideoProcessingTest, CopyMem) {
+TEST_P(VideoProcessingTest, CopyMem) {
std::unique_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false, nullptr));
std::unique_ptr<DenoiserFilter> df_sse_neon(
DenoiserFilter::Create(true, nullptr));
@@ -40,7 +40,7 @@
EXPECT_EQ(0, memcmp(src, dst, 16 * 16));
}
-TEST_F(VideoProcessingTest, Variance) {
+TEST_P(VideoProcessingTest, Variance) {
std::unique_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false, nullptr));
std::unique_ptr<DenoiserFilter> df_sse_neon(
DenoiserFilter::Create(true, nullptr));
@@ -64,7 +64,7 @@
EXPECT_EQ(var, df_sse_neon->Variance16x8(src, 16, dst, 16, &sse));
}
-TEST_F(VideoProcessingTest, MbDenoise) {
+TEST_P(VideoProcessingTest, MbDenoise) {
std::unique_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false, nullptr));
std::unique_ptr<DenoiserFilter> df_sse_neon(
DenoiserFilter::Create(true, nullptr));
@@ -125,7 +125,7 @@
EXPECT_EQ(COPY_BLOCK, decision);
}
-TEST_F(VideoProcessingTest, Denoiser) {
+TEST_P(VideoProcessingTest, Denoiser) {
// Used in swap buffer.
int denoised_frame_toggle = 0;
// Create pure C denoiser.
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.cc b/webrtc/modules/video_processing/test/video_processing_unittest.cc
index 0d18d0a..4b5334c 100644
--- a/webrtc/modules/video_processing/test/video_processing_unittest.cc
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.cc
@@ -59,9 +59,10 @@
VideoProcessingTest::VideoProcessingTest()
: vp_(NULL),
source_file_(NULL),
- width_(352),
+ vtt_(GetParam()),
+ width_(vtt_.width),
half_width_((width_ + 1) / 2),
- height_(288),
+ height_(vtt_.height),
size_y_(width_ * height_),
size_uv_(half_width_ * ((height_ + 1) / 2)),
frame_length_(CalcBufferSize(kI420, width_, height_)) {}
@@ -69,18 +70,15 @@
void VideoProcessingTest::SetUp() {
vp_ = VideoProcessing::Create();
ASSERT_TRUE(vp_ != NULL);
-
video_frame_.CreateEmptyFrame(width_, height_, width_,
half_width_, half_width_);
// Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
- const std::string video_file =
- webrtc::test::ResourcePath("foreman_cif", "yuv");
- source_file_ = fopen(video_file.c_str(), "rb");
+ source_file_ = fopen(vtt_.file_name.c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL)
- << "Cannot read source file: " + video_file + "\n";
+ << "Cannot read source file: " + vtt_.file_name + "\n";
}
void VideoProcessingTest::TearDown() {
@@ -93,9 +91,9 @@
}
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_HandleNullBuffer) {
+TEST_P(VideoProcessingTest, DISABLED_HandleNullBuffer) {
#else
-TEST_F(VideoProcessingTest, HandleNullBuffer) {
+TEST_P(VideoProcessingTest, HandleNullBuffer) {
#endif
// TODO(mikhal/stefan): Do we need this one?
VideoProcessing::FrameStats stats;
@@ -111,9 +109,9 @@
}
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_HandleBadStats) {
+TEST_P(VideoProcessingTest, DISABLED_HandleBadStats) {
#else
-TEST_F(VideoProcessingTest, HandleBadStats) {
+TEST_P(VideoProcessingTest, HandleBadStats) {
#endif
VideoProcessing::FrameStats stats;
vp_->ClearFrameStats(&stats);
@@ -129,9 +127,9 @@
}
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) {
+TEST_P(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) {
#else
-TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) {
+TEST_P(VideoProcessingTest, IdenticalResultsAfterReset) {
#endif
VideoFrame video_frame2;
VideoProcessing::FrameStats stats;
@@ -166,9 +164,9 @@
}
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_FrameStats) {
+TEST_P(VideoProcessingTest, DISABLED_FrameStats) {
#else
-TEST_F(VideoProcessingTest, FrameStats) {
+TEST_P(VideoProcessingTest, FrameStats) {
#endif
VideoProcessing::FrameStats stats;
vp_->ClearFrameStats(&stats);
@@ -195,9 +193,9 @@
}
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) {
+TEST_P(VideoProcessingTest, DISABLED_PreprocessorLogic) {
#else
-TEST_F(VideoProcessingTest, PreprocessorLogic) {
+TEST_P(VideoProcessingTest, PreprocessorLogic) {
#endif
// Disable temporal sampling (frame dropping).
vp_->EnableTemporalDecimation(false);
@@ -218,9 +216,9 @@
}
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_Resampler) {
+TEST_P(VideoProcessingTest, DISABLED_Resampler) {
#else
-TEST_F(VideoProcessingTest, Resampler) {
+TEST_P(VideoProcessingTest, Resampler) {
#endif
enum { NumRuns = 1 };
@@ -307,6 +305,12 @@
printf("Min run time = %d us / frame\n\n", static_cast<int>(min_runtime));
}
+INSTANTIATE_TEST_CASE_P(ForemanCif,
+ VideoProcessingTest,
+ ::testing::Values(VideoToTest(
+ {webrtc::test::ResourcePath("foreman_cif", "yuv"),
+ 352, 288})));
+
void PreprocessFrameAndVerify(const VideoFrame& source,
int target_width,
int target_height,
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.h b/webrtc/modules/video_processing/test/video_processing_unittest.h
index 3433c6c..1c548d3 100644
--- a/webrtc/modules/video_processing/test/video_processing_unittest.h
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.h
@@ -20,26 +20,27 @@
namespace webrtc {
-class VideoProcessingTest : public ::testing::Test {
+typedef struct {
+ std::string file_name;
+ int width;
+ int height;
+} VideoToTest;
+
+class VideoProcessingTest : public ::testing::TestWithParam<VideoToTest> {
protected:
VideoProcessingTest();
virtual void SetUp();
virtual void TearDown();
- static void SetUpTestCase() {
- Trace::CreateTrace();
- std::string trace_file = webrtc::test::OutputPath() + "VPMTrace.txt";
- ASSERT_EQ(0, Trace::SetTraceFile(trace_file.c_str()));
- }
- static void TearDownTestCase() { Trace::ReturnTrace(); }
VideoProcessing* vp_;
FILE* source_file_;
VideoFrame video_frame_;
- const int width_;
- const int half_width_;
- const int height_;
- const int size_y_;
- const int size_uv_;
- const size_t frame_length_;
+ VideoToTest vtt_;
+ int width_;
+ int half_width_;
+ int height_;
+ int size_y_;
+ int size_uv_;
+ size_t frame_length_;
};
} // namespace webrtc