niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license |
| 5 | * that can be found in the LICENSE file in the root of the source |
| 6 | * tree. An additional intellectual property rights grant can be found |
| 7 | * in the file PATENTS. All contributing project authors may |
| 8 | * be found in the AUTHORS file in the root of the source tree. |
| 9 | */ |
| 10 | |
pbos@webrtc.org | 6f3d8fc | 2013-05-27 14:12:16 +0000 | [diff] [blame] | 11 | #include "webrtc/modules/video_processing/main/source/deflickering.h" |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 12 | |
| 13 | #include <math.h> |
| 14 | #include <stdlib.h> |
| 15 | |
pbos@webrtc.org | 6f3d8fc | 2013-05-27 14:12:16 +0000 | [diff] [blame] | 16 | #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" |
| 17 | #include "webrtc/system_wrappers/interface/sort.h" |
| 18 | #include "webrtc/system_wrappers/interface/trace.h" |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 19 | |
| 20 | namespace webrtc { |
| 21 | |
| 22 | // Detection constants |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 23 | // (Q4) Maximum allowed deviation for detection. |
| 24 | enum { kFrequencyDeviation = 39 }; |
| 25 | // (Q4) Minimum frequency that can be detected. |
| 26 | enum { kMinFrequencyToDetect = 32 }; |
| 27 | // Number of flickers before we accept detection |
| 28 | enum { kNumFlickerBeforeDetect = 2 }; |
| 29 | enum { kmean_valueScaling = 4 }; // (Q4) In power of 2 |
| 30 | // Dead-zone region in terms of pixel values |
| 31 | enum { kZeroCrossingDeadzone = 10 }; |
| 32 | // Deflickering constants. |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 33 | // Compute the quantiles over 1 / DownsamplingFactor of the image. |
| 34 | enum { kDownsamplingFactor = 8 }; |
| 35 | enum { kLog2OfDownsamplingFactor = 3 }; |
| 36 | |
| 37 | // To generate in Matlab: |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 38 | // >> probUW16 = round(2^11 * |
| 39 | // [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 40 | // >> fprintf('%d, ', probUW16) |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 41 | // Resolution reduced to avoid overflow when multiplying with the |
| 42 | // (potentially) large number of pixels. |
| 43 | const uint16_t VPMDeflickering::prob_uw16_[kNumProbs] = {102, 205, 410, 614, |
| 44 | 819, 1024, 1229, 1434, 1638, 1843, 1946, 1987}; // <Q11> |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 45 | |
| 46 | // To generate in Matlab: |
| 47 | // >> numQuants = 14; maxOnlyLength = 5; |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 48 | // >> weightUW16 = round(2^15 * |
| 49 | // [linspace(0.5, 1.0, numQuants - maxOnlyLength)]); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 50 | // >> fprintf('%d, %d,\n ', weightUW16); |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 51 | const uint16_t VPMDeflickering::weight_uw16_[kNumQuants - kMaxOnlyLength] = |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 52 | {16384, 18432, 20480, 22528, 24576, 26624, 28672, 30720, 32768}; // <Q15> |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 53 | |
// Constructs the deflickering module with a zero id and puts all state
// (buffers, detection state, frame rate) into its initial condition.
VPMDeflickering::VPMDeflickering()
    : id_(0) {
  Reset();
}
| 58 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 59 | VPMDeflickering::~VPMDeflickering() {} |
| 60 | |
// Sets the id used when emitting trace output. Always succeeds (returns 0).
int32_t VPMDeflickering::ChangeUniqueId(const int32_t id) {
  id_ = id;
  return 0;
}
| 65 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 66 | void VPMDeflickering::Reset() { |
| 67 | mean_buffer_length_ = 0; |
| 68 | detection_state_ = 0; |
| 69 | frame_rate_ = 0; |
| 70 | |
| 71 | memset(mean_buffer_, 0, sizeof(int32_t) * kMeanBufferLength); |
| 72 | memset(timestamp_buffer_, 0, sizeof(int32_t) * kMeanBufferLength); |
| 73 | |
| 74 | // Initialize the history with a uniformly distributed histogram. |
| 75 | quant_hist_uw8_[0][0] = 0; |
| 76 | quant_hist_uw8_[0][kNumQuants - 1] = 255; |
| 77 | for (int32_t i = 0; i < kNumProbs; i++) { |
| 78 | quant_hist_uw8_[0][i + 1] = static_cast<uint8_t>((WEBRTC_SPL_UMUL_16_16( |
| 79 | prob_uw16_[i], 255) + (1 << 10)) >> 11); // Unsigned round. <Q0> |
| 80 | } |
| 81 | |
| 82 | for (int32_t i = 1; i < kFrameHistory_size; i++) { |
| 83 | memcpy(quant_hist_uw8_[i], quant_hist_uw8_[0], |
| 84 | sizeof(uint8_t) * kNumQuants); |
| 85 | } |
| 86 | } |
| 87 | |
| 88 | int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame, |
| 89 | VideoProcessingModule::FrameStats* stats) { |
| 90 | assert(frame); |
| 91 | uint32_t frame_memory; |
| 92 | uint8_t quant_uw8[kNumQuants]; |
| 93 | uint8_t maxquant_uw8[kNumQuants]; |
| 94 | uint8_t minquant_uw8[kNumQuants]; |
| 95 | uint16_t target_quant_uw16[kNumQuants]; |
| 96 | uint16_t increment_uw16; |
| 97 | uint8_t map_uw8[256]; |
| 98 | |
| 99 | uint16_t tmp_uw16; |
| 100 | uint32_t tmp_uw32; |
| 101 | int width = frame->width(); |
| 102 | int height = frame->height(); |
| 103 | |
| 104 | if (frame->IsZeroSize()) { |
| 105 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_, |
| 106 | "Null frame pointer"); |
| 107 | return VPM_GENERAL_ERROR; |
| 108 | } |
| 109 | |
| 110 | // Stricter height check due to subsampling size calculation below. |
| 111 | if (height < 2) { |
| 112 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_, |
| 113 | "Invalid frame size"); |
| 114 | return VPM_GENERAL_ERROR; |
| 115 | } |
| 116 | |
| 117 | if (!VideoProcessingModule::ValidFrameStats(*stats)) { |
| 118 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_, |
| 119 | "Invalid frame stats"); |
| 120 | return VPM_GENERAL_ERROR; |
| 121 | } |
| 122 | |
| 123 | if (PreDetection(frame->timestamp(), *stats) == -1) return VPM_GENERAL_ERROR; |
| 124 | |
| 125 | // Flicker detection |
| 126 | int32_t det_flicker = DetectFlicker(); |
| 127 | if (det_flicker < 0) { |
| 128 | return VPM_GENERAL_ERROR; |
| 129 | } else if (det_flicker != 1) { |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 130 | return 0; |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 131 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 132 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 133 | // Size of luminance component. |
| 134 | const uint32_t y_size = height * width; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 135 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 136 | const uint32_t y_sub_size = width * (((height - 1) >> |
| 137 | kLog2OfDownsamplingFactor) + 1); |
| 138 | uint8_t* y_sorted = new uint8_t[y_sub_size]; |
| 139 | uint32_t sort_row_idx = 0; |
| 140 | for (int i = 0; i < height; i += kDownsamplingFactor) { |
| 141 | memcpy(y_sorted + sort_row_idx * width, |
| 142 | frame->buffer(kYPlane) + i * width, width); |
| 143 | sort_row_idx++; |
| 144 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 145 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 146 | webrtc::Sort(y_sorted, y_sub_size, webrtc::TYPE_UWord8); |
| 147 | |
| 148 | uint32_t prob_idx_uw32 = 0; |
| 149 | quant_uw8[0] = 0; |
| 150 | quant_uw8[kNumQuants - 1] = 255; |
| 151 | |
| 152 | // Ensure we won't get an overflow below. |
| 153 | // In practice, the number of subsampled pixels will not become this large. |
| 154 | if (y_sub_size > (1 << 21) - 1) { |
| 155 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_, |
| 156 | "Subsampled number of pixels too large"); |
| 157 | return -1; |
| 158 | } |
| 159 | |
| 160 | for (int32_t i = 0; i < kNumProbs; i++) { |
| 161 | // <Q0>. |
| 162 | prob_idx_uw32 = WEBRTC_SPL_UMUL_32_16(y_sub_size, prob_uw16_[i]) >> 11; |
| 163 | quant_uw8[i + 1] = y_sorted[prob_idx_uw32]; |
| 164 | } |
| 165 | |
| 166 | delete [] y_sorted; |
| 167 | y_sorted = NULL; |
| 168 | |
| 169 | // Shift history for new frame. |
| 170 | memmove(quant_hist_uw8_[1], quant_hist_uw8_[0], |
| 171 | (kFrameHistory_size - 1) * kNumQuants * sizeof(uint8_t)); |
| 172 | // Store current frame in history. |
| 173 | memcpy(quant_hist_uw8_[0], quant_uw8, kNumQuants * sizeof(uint8_t)); |
| 174 | |
| 175 | // We use a frame memory equal to the ceiling of half the frame rate to |
| 176 | // ensure we capture an entire period of flicker. |
| 177 | frame_memory = (frame_rate_ + (1 << 5)) >> 5; // Unsigned ceiling. <Q0> |
| 178 | // frame_rate_ in Q4. |
| 179 | if (frame_memory > kFrameHistory_size) { |
| 180 | frame_memory = kFrameHistory_size; |
| 181 | } |
| 182 | |
| 183 | // Get maximum and minimum. |
| 184 | for (int32_t i = 0; i < kNumQuants; i++) { |
| 185 | maxquant_uw8[i] = 0; |
| 186 | minquant_uw8[i] = 255; |
| 187 | for (uint32_t j = 0; j < frame_memory; j++) { |
| 188 | if (quant_hist_uw8_[j][i] > maxquant_uw8[i]) { |
| 189 | maxquant_uw8[i] = quant_hist_uw8_[j][i]; |
| 190 | } |
| 191 | |
| 192 | if (quant_hist_uw8_[j][i] < minquant_uw8[i]) { |
| 193 | minquant_uw8[i] = quant_hist_uw8_[j][i]; |
| 194 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 195 | } |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 196 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 197 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 198 | // Get target quantiles. |
| 199 | for (int32_t i = 0; i < kNumQuants - kMaxOnlyLength; i++) { |
| 200 | target_quant_uw16[i] = static_cast<uint16_t>((WEBRTC_SPL_UMUL_16_16( |
| 201 | weight_uw16_[i], maxquant_uw8[i]) + WEBRTC_SPL_UMUL_16_16((1 << 15) - |
| 202 | weight_uw16_[i], minquant_uw8[i])) >> 8); // <Q7> |
| 203 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 204 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 205 | for (int32_t i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++) { |
| 206 | target_quant_uw16[i] = ((uint16_t)maxquant_uw8[i]) << 7; |
| 207 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 208 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 209 | // Compute the map from input to output pixels. |
| 210 | uint16_t mapUW16; // <Q7> |
| 211 | for (int32_t i = 1; i < kNumQuants; i++) { |
| 212 | // As quant and targetQuant are limited to UWord8, it's safe to use Q7 here. |
| 213 | tmp_uw32 = static_cast<uint32_t>(target_quant_uw16[i] - |
| 214 | target_quant_uw16[i - 1]); |
| 215 | tmp_uw16 = static_cast<uint16_t>(quant_uw8[i] - quant_uw8[i - 1]); // <Q0> |
| 216 | |
| 217 | if (tmp_uw16 > 0) { |
| 218 | increment_uw16 = static_cast<uint16_t>(WebRtcSpl_DivU32U16(tmp_uw32, |
| 219 | tmp_uw16)); // <Q7> |
| 220 | } else { |
| 221 | // The value is irrelevant; the loop below will only iterate once. |
| 222 | increment_uw16 = 0; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 223 | } |
| 224 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 225 | mapUW16 = target_quant_uw16[i - 1]; |
| 226 | for (uint32_t j = quant_uw8[i - 1]; j < (uint32_t)(quant_uw8[i] + 1); j++) { |
| 227 | // Unsigned round. <Q0> |
| 228 | map_uw8[j] = (uint8_t)((mapUW16 + (1 << 6)) >> 7); |
| 229 | mapUW16 += increment_uw16; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 230 | } |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 231 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 232 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 233 | // Map to the output frame. |
| 234 | uint8_t* buffer = frame->buffer(kYPlane); |
| 235 | for (uint32_t i = 0; i < y_size; i++) { |
| 236 | buffer[i] = map_uw8[buffer[i]]; |
| 237 | } |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 238 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 239 | // Frame was altered, so reset stats. |
| 240 | VideoProcessingModule::ClearFrameStats(stats); |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 241 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 242 | return VPM_OK; |
niklase@google.com | 470e71d | 2011-07-07 08:21:25 +0000 | [diff] [blame] | 243 | } |
| 244 | |
/**
   Performs some pre-detection operations. Must be called before
   DetectFlicker(). Updates the mean-value and timestamp histories with the
   current frame and estimates the frame rate (Q4) over a window long enough
   to cover kNumFlickerBeforeDetect flicker periods.

   \param[in] timestamp Timestamp of the current frame.
   \param[in] stats Statistics of the current frame.

   \return 0: Success\n
           2: Detection not possible due to flickering frequency too close to
              zero.\n
          -1: Error
*/
int32_t VPMDeflickering::PreDetection(const uint32_t timestamp,
    const VideoProcessingModule::FrameStats& stats) {
  int32_t mean_val;  // Mean value of frame (Q4)
  uint32_t frame_rate = 0;
  int32_t meanBufferLength;  // Temp variable.

  // Mean luminance of the frame, scaled up to Q4.
  mean_val = ((stats.sum << kmean_valueScaling) / stats.num_pixels);
  // Update mean value buffer (newest first).
  // This should be done even though we might end up in an unreliable detection.
  memmove(mean_buffer_ + 1, mean_buffer_,
          (kMeanBufferLength - 1) * sizeof(int32_t));
  mean_buffer_[0] = mean_val;

  // Update timestamp buffer (newest first).
  // This should be done even though we might end up in an unreliable detection.
  memmove(timestamp_buffer_ + 1, timestamp_buffer_, (kMeanBufferLength - 1) *
          sizeof(uint32_t));
  timestamp_buffer_[0] = timestamp;

  // Compute current frame rate (Q4). The 90000 factor matches the 90 kHz
  // timestamp clock assumed throughout this file.
  if (timestamp_buffer_[kMeanBufferLength - 1] != 0) {
    // Full history available: average over the whole window.
    frame_rate = ((90000 << 4) * (kMeanBufferLength - 1));
    frame_rate /=
        (timestamp_buffer_[0] - timestamp_buffer_[kMeanBufferLength - 1]);
  } else if (timestamp_buffer_[1] != 0) {
    // Only two samples: use the latest frame interval.
    frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
  }

  // Determine required size of mean value buffer (mean_buffer_length_):
  // long enough to span kNumFlickerBeforeDetect periods of the slowest
  // detectable flicker frequency.
  if (frame_rate == 0) {
    meanBufferLength = 1;
  } else {
    meanBufferLength =
        (kNumFlickerBeforeDetect * frame_rate) / kMinFrequencyToDetect;
  }
  // Sanity check of buffer length.
  if (meanBufferLength >= kMeanBufferLength) {
    // Too long buffer. The flickering frequency is too close to zero, which
    // makes the estimation unreliable.
    mean_buffer_length_ = 0;
    return 2;
  }
  mean_buffer_length_ = meanBufferLength;

  // Re-estimate the frame rate over the (shorter) window actually used for
  // detection.
  if ((timestamp_buffer_[mean_buffer_length_ - 1] != 0) &&
      (mean_buffer_length_ != 1)) {
    frame_rate = ((90000 << 4) * (mean_buffer_length_ - 1));
    frame_rate /=
        (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
  } else if (timestamp_buffer_[1] != 0) {
    frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
  }
  frame_rate_ = frame_rate;

  return VPM_OK;
}
| 314 | |
/**
   This function detects flicker in the video stream.
   (Note: the mean value buffer itself is filled in PreDetection(), which
   must run first; this function only reads it.)

   \return 0: No flickering detected\n
           1: Flickering detected\n
           2: Detection not possible due to unreliable frequency interval
          -1: Error
*/
int32_t VPMDeflickering::DetectFlicker() {
  uint32_t i;
  int32_t freqEst;  // (Q4) Frequency estimate to base detection upon
  int32_t ret_val = -1;

  /* Sanity check for mean_buffer_length_ */
  if (mean_buffer_length_ < 2) {
    /* Not possible to estimate frequency */
    return(2);
  }
  // Count zero crossings with a dead zone to be robust against noise. If the
  // noise std is 2 pixel this corresponds to about 95% confidence interval.
  int32_t deadzone = (kZeroCrossingDeadzone << kmean_valueScaling);  // Q4
  int32_t meanOfBuffer = 0;  // Mean value of mean value buffer.
  int32_t numZeros = 0;  // Number of zeros that cross the dead-zone.
  int32_t cntState = 0;  // State variable for zero crossing regions.
  int32_t cntStateOld = 0;  // Previous state for zero crossing regions.

  // Mean of the mean-value buffer.
  for (i = 0; i < mean_buffer_length_; i++) {
    meanOfBuffer += mean_buffer_[i];
  }
  meanOfBuffer += (mean_buffer_length_ >> 1);  // Rounding, not truncation.
  meanOfBuffer /= mean_buffer_length_;

  // Count zero crossings. State is +1 above the dead zone, -1 below it and
  // 0 inside it; a crossing is counted when the sign flips across the zone.
  cntStateOld = (mean_buffer_[0] >= (meanOfBuffer + deadzone));
  cntStateOld -= (mean_buffer_[0] <= (meanOfBuffer - deadzone));
  for (i = 1; i < mean_buffer_length_; i++) {
    cntState = (mean_buffer_[i] >= (meanOfBuffer + deadzone));
    cntState -= (mean_buffer_[i] <= (meanOfBuffer - deadzone));
    if (cntStateOld == 0) {
      // Still inside the dead zone; remember the side we must leave on.
      cntStateOld = -cntState;
    }
    if (((cntState + cntStateOld) == 0) && (cntState != 0)) {
      numZeros++;
      cntStateOld = cntState;
    }
  }
  // END count zero crossings.

  /* Frequency estimation according to:
   * freqEst = numZeros * frame_rate / 2 / mean_buffer_length_;
   *
   * Resolution is set to Q4
   */
  freqEst = ((numZeros * 90000) << 3);
  freqEst /=
      (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);

  /* Translate frequency estimate to regions close to 100 and 120 Hz by
   * stepping through alias frequencies until one lands in (or beyond) the
   * detection bands. */
  uint8_t freqState = 0;  // Current translation state;
                          // (0) Not in interval,
                          // (1) Within valid interval,
                          // (2) Out of range
  int32_t freqAlias = freqEst;
  if (freqEst > kMinFrequencyToDetect) {
    uint8_t aliasState = 1;
    while(freqState == 0) {
      /* Increase frequency */
      freqAlias += (aliasState * frame_rate_);
      freqAlias += ((freqEst << 1) * (1 - (aliasState << 1)));
      /* Compute state: 1 if within +/-kFrequencyDeviation of 100 or 120 Hz
       * (Q4), >=2 once we have passed the 120 Hz band. */
      freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation);
      freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation);
      freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation));
      /* Switch alias state */
      aliasState++;
      aliasState &= 0x01;
    }
  }
  /* Is frequency estimate within detection region? */
  if (freqState == 1) {
    ret_val = 1;
  } else if (freqState == 0) {
    // Unreachable after the loop above unless freqEst was too small;
    // kept for safety.
    ret_val = 2;
  } else {
    ret_val = 0;
  }
  return ret_val;
}
| 404 | |
mikhal@webrtc.org | b43d807 | 2013-10-03 16:42:41 +0000 | [diff] [blame^] | 405 | } // namespace webrtc |