Add DecodedImageCallback::Decoded() function with custom decode time value.
On Android, we would like to use MediaCodec output buffers to hold decoded frames until they can be rendered to a texture. Only one texture buffer can be in use at a time, so the decode time calculated in VCMTiming would be wrong: that calculation would also include the time the decoder spent waiting for the upper layers (which depend on network jitter and the actual render time) to release the frame.
This new method will be used in
https://codereview.webrtc.org/1422963003/
BUG=webrtc:4993
R=stefan@webrtc.org
TBR=mflodman@webrtc.org
Review URL: https://codereview.webrtc.org/1414693006 .
Cr-Commit-Position: refs/heads/master@{#10576}
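For context, here is a minimal, self-contained sketch of the callback shape this change describes. The stand-in types and the exact overload signature below are assumptions for illustration; the real VideoFrame and DecodedImageCallback live in the WebRTC tree and may differ in detail.

```cpp
#include <cstdint>

// Hypothetical stand-ins mirroring the shape described in the commit message.
struct VideoFrame {};

class DecodedImageCallback {
 public:
  virtual ~DecodedImageCallback() = default;
  // Existing callback: decode time is measured inside VCMTiming, which on
  // Android would also count the time a frame waits in a MediaCodec buffer.
  virtual int32_t Decoded(VideoFrame& decoded_image) = 0;
  // New overload (assumed signature): the decoder reports its own decode
  // time, so buffer-hold time is excluded from the measurement.
  virtual int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) {
    return Decoded(decoded_image);
  }
};

// Example: an Android HW decoder wrapper reporting only the time MediaCodec
// actually spent decoding the frame.
void ReportDecodedFrame(DecodedImageCallback* callback,
                        VideoFrame& frame,
                        int64_t decode_start_ms,
                        int64_t decode_done_ms) {
  callback->Decoded(frame, decode_done_ms - decode_start_ms);
}
```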
diff --git a/webrtc/modules/video_coding/main/source/timing.cc b/webrtc/modules/video_coding/main/source/timing.cc
index 8d59135..d8ab96d 100644
--- a/webrtc/modules/video_coding/main/source/timing.cc
+++ b/webrtc/modules/video_coding/main/source/timing.cc
@@ -165,13 +165,13 @@
 }
 
 int32_t VCMTiming::StopDecodeTimer(uint32_t time_stamp,
-                                   int64_t start_time_ms,
+                                   int32_t decode_time_ms,
                                    int64_t now_ms,
                                    int64_t render_time_ms) {
   CriticalSectionScoped cs(crit_sect_);
-  int32_t time_diff_ms = codec_timer_.StopTimer(start_time_ms, now_ms);
-  assert(time_diff_ms >= 0);
-  last_decode_ms_ = time_diff_ms;
+  codec_timer_.MaxFilter(decode_time_ms, now_ms);
+  assert(decode_time_ms >= 0);
+  last_decode_ms_ = decode_time_ms;
 
   // Update stats.
   ++num_decoded_frames_;
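To illustrate why the diff above matters, the following simplified timer contrasts the two code paths: the old StopTimer() path derives decode time from a stored start time (and so counts any time the frame spent waiting to be released), while the new MaxFilter() path records a decode time measured by the caller. This class is a sketch only; the real VCMCodecTimer keeps a history and filters differently.

```cpp
#include <cstdint>

// Illustrative only: a simplified codec timer with the two entry points seen
// in the diff above.
class SimpleCodecTimer {
 public:
  // Old path: decode time is derived inside the timer, so time spent waiting
  // for the upper layers to release the frame is counted as decode time.
  int32_t StopTimer(int64_t start_time_ms, int64_t now_ms) {
    int32_t elapsed_ms = static_cast<int32_t>(now_ms - start_time_ms);
    MaxFilter(elapsed_ms, now_ms);
    return elapsed_ms;
  }

  // New path: the caller supplies the decode time it measured itself, and the
  // timer only tracks the maximum for delay estimation.
  void MaxFilter(int32_t decode_time_ms, int64_t /*now_ms*/) {
    if (decode_time_ms > max_decode_time_ms_)
      max_decode_time_ms_ = decode_time_ms;
  }

  int32_t RequiredDecodeTimeMs() const { return max_decode_time_ms_; }

 private:
  int32_t max_decode_time_ms_ = 0;
};
```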