/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include <vector>

#include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
using rtc::scoped_ptr;

using webrtc::CodecSpecificInfo;
using webrtc::DecodedImageCallback;
using webrtc::EncodedImage;
using webrtc::VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::TickTime;
using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;

namespace webrtc_jni {

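// Render EGL context used for HW surface decoding; set from Java through
// SetAndroidObjects(). When it is NULL the decoder falls back to ByteBuffer
// output (see |use_surface_| in the constructor).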
jobject MediaCodecVideoDecoderFactory::render_egl_context_ = NULL;

class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(JNIEnv* jni, VideoCodecType codecType);
  virtual ~MediaCodecVideoDecoder();

  static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);

  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
      override;

  int32_t Decode(
      const EncodedImage& inputImage, bool missingFrames,
      const RTPFragmentationHeader* fragmentation,
      const CodecSpecificInfo* codecSpecificInfo = NULL,
      int64_t renderTimeMs = -1) override;

  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
      override;

  int32_t Release() override;

  int32_t Reset() override;
  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  int32_t InitDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and
  // return true on success.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
  int32_t ProcessHWErrorOnCodecThread();

  // Type of video codec.
  VideoCodecType codecType_;

  bool key_frame_required_;
  bool inited_;
  bool sw_fallback_required_;
  bool use_surface_;
  VideoCodec codec_;
  VideoFrame decoded_image_;
  NativeHandleImpl native_handle_;
  DecodedImageCallback* callback_;
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_decoding_time_ms_;  // Overall decoding time in the current
                                  // statistics interval.
  uint32_t max_pending_frames_;  // Maximum number of pending input frames.
  std::vector<int32_t> timestamps_;
  std::vector<int64_t> ntp_times_ms_;
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent
                                             // to decoder input.
  int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q.
  int64_t output_ntp_time_ms_;  // Last output frame ntp time from
                                // ntp_times_ms_ queue.

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  jmethodID j_init_decode_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  jfieldID j_surface_texture_field_;
  jfieldID j_textureID_field_;
  // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
  jobject surface_texture_;
  jobject previous_surface_texture_;
};

MediaCodecVideoDecoder::MediaCodecVideoDecoder(
    JNIEnv* jni, VideoCodecType codecType) :
    codecType_(codecType),
    key_frame_required_(true),
    inited_(false),
    sw_fallback_required_(false),
    surface_texture_(NULL),
    previous_surface_texture_(NULL),
    codec_thread_(new Thread()),
    j_media_codec_video_decoder_class_(
        jni,
        FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
    j_media_codec_video_decoder_(
        jni,
        jni->NewObject(*j_media_codec_video_decoder_class_,
                       GetMethodID(jni,
                                   *j_media_codec_video_decoder_class_,
                                   "<init>",
                                   "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";

  j_init_decode_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "initDecode",
      "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
      "IIZLandroid/opengl/EGLContext;)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
  j_queue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");

  j_input_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "inputBuffers", "[Ljava/nio/ByteBuffer;");
  j_output_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "outputBuffers", "[Ljava/nio/ByteBuffer;");
  j_color_format_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
  j_width_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "width", "I");
  j_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "height", "I");
  j_stride_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "stride", "I");
  j_slice_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
  j_textureID_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "textureID", "I");
  j_surface_texture_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
      "Landroid/graphics/SurfaceTexture;");

  jclass j_decoder_output_buffer_info_class = FindClass(jni,
      "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
  j_info_index_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "index", "I");
  j_info_offset_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "offset", "I");
  j_info_size_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "size", "I");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");

  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
  use_surface_ = true;
  if (MediaCodecVideoDecoderFactory::render_egl_context_ == NULL) {
    use_surface_ = false;
  }
  memset(&codec_, 0, sizeof(codec_));
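  // The public entry points forward work to |codec_thread_| with blocking
  // Invoke() calls; AllowBlockingCalls() (from androidmediacodeccommon.h) is
  // presumably what permits such blocking calls from this thread.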
  AllowBlockingCalls();
}

MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
  // Delete global references.
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  if (previous_surface_texture_ != NULL) {
    jni->DeleteGlobalRef(previous_surface_texture_);
  }
  if (surface_texture_ != NULL) {
    jni->DeleteGlobalRef(surface_texture_);
  }
}

int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
                                           int32_t numberOfCores) {
  ALOGD("InitDecode.");
  if (inst == NULL) {
    ALOGE("NULL VideoCodec instance");
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  CHECK(inst->codecType == codecType_) << "Unsupported codec " <<
      inst->codecType << " for " << codecType_;

  if (sw_fallback_required_) {
    ALOGE("InitDecode() - fallback to SW decoder");
    return WEBRTC_VIDEO_CODEC_OK;
  }
  // Save VideoCodec instance for later.
  if (&codec_ != inst) {
    codec_ = *inst;
  }
  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;

  // Call Java init.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
}

int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d.",
      (int)codecType_, codec_.width, codec_.height,
      codec_.maxFramerate);

  // Release previous codec first if it was allocated before.
  int ret_val = ReleaseOnCodecThread();
  if (ret_val < 0) {
    ALOGE("Release failure: %d - fallback to SW codec", ret_val);
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Always start with a complete key frame.
  key_frame_required_ = true;
  frames_received_ = 0;
  frames_decoded_ = 0;

  jobject j_video_codec_enum = JavaEnumFromIndex(
      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_init_decode_method_,
      j_video_codec_enum,
      codec_.width,
      codec_.height,
      use_surface_,
      MediaCodecVideoDecoderFactory::render_egl_context_);
  if (CheckException(jni) || !success) {
    ALOGE("Codec initialization error - fallback to SW codec.");
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

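  // Limit how far decoder input may run ahead of decoded output; once the
  // limit is hit, DecodeOnCodecThread() waits for outputs before queueing more.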
  switch (codecType_) {
    case kVideoCodecVP8:
      max_pending_frames_ = kMaxPendingFramesVp8;
      break;
    case kVideoCodecH264:
      max_pending_frames_ = kMaxPendingFramesH264;
      break;
    default:
      max_pending_frames_ = 0;
  }
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;
  output_timestamp_ = 0;
  output_ntp_time_ms_ = 0;
  timestamps_.clear();
  ntp_times_ms_.clear();
  frame_rtc_times_ms_.clear();

  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    if (CheckException(jni)) {
      ALOGE("NewGlobalRef error - fallback to SW codec.");
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  if (use_surface_) {
    jobject surface_texture = GetObjectField(
        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
    if (previous_surface_texture_ != NULL) {
      jni->DeleteGlobalRef(previous_surface_texture_);
    }
    previous_surface_texture_ = surface_texture_;
    surface_texture_ = jni->NewGlobalRef(surface_texture);
  }
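  // Start the periodic output poll: OnMessage() drains any pending outputs
  // and re-posts itself every kMediaCodecPollMs.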
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}

369int32_t MediaCodecVideoDecoder::Release() {
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000370 ALOGD("DecoderRelease request");
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000371 return codec_thread_->Invoke<int32_t>(
372 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
373}
374
375int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
376 if (!inited_) {
377 return WEBRTC_VIDEO_CODEC_OK;
378 }
379 CheckOnCodecThread();
380 JNIEnv* jni = AttachCurrentThreadIfNeeded();
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000381 ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_);
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000382 ScopedLocalRefFrame local_ref_frame(jni);
383 for (size_t i = 0; i < input_buffers_.size(); i++) {
384 jni->DeleteGlobalRef(input_buffers_[i]);
385 }
386 input_buffers_.clear();
387 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000388 inited_ = false;
Alex Glaznev782671f2015-06-12 16:40:44 -0700389 rtc::MessageQueueManager::Clear(this);
390 if (CheckException(jni)) {
391 ALOGE("Decoder release exception");
392 return WEBRTC_VIDEO_CODEC_ERROR;
393 }
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000394 return WEBRTC_VIDEO_CODEC_OK;
395}
396
397void MediaCodecVideoDecoder::CheckOnCodecThread() {
398 CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
399 << "Running on wrong thread!";
400}
401
int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
  CheckOnCodecThread();
  int ret_val = ReleaseOnCodecThread();
  if (ret_val < 0) {
    ALOGE("ProcessHWError: Release failure");
  }
  if (codecType_ == kVideoCodecH264) {
    // For now there is no SW H.264 decoder which can be used as a fallback
    // codec, so try to restart the HW codec instead.
    ret_val = InitDecodeOnCodecThread();
    ALOGE("Reset H.264 codec done. Status: %d", ret_val);
    if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
      // H.264 codec was successfully reset - return regular error code.
      return WEBRTC_VIDEO_CODEC_ERROR;
    } else {
      // Failed to restart the H.264 codec - return an error code which should
      // stop the call.
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
  } else {
    sw_fallback_required_ = true;
    ALOGE("Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE");
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
}

int32_t MediaCodecVideoDecoder::Decode(
    const EncodedImage& inputImage,
    bool missingFrames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codecSpecificInfo,
    int64_t renderTimeMs) {
  if (sw_fallback_required_) {
    ALOGE("Decode() - fallback to SW codec");
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
  if (callback_ == NULL) {
    ALOGE("Decode() - callback_ is NULL");
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (inputImage._buffer == NULL && inputImage._length > 0) {
    ALOGE("Decode() - inputImage is incorrect");
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (!inited_) {
    ALOGE("Decode() - decoder is not initialized");
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // Check if encoded frame dimension has changed.
  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
      (inputImage._encodedWidth != codec_.width ||
      inputImage._encodedHeight != codec_.height)) {
    codec_.width = inputImage._encodedWidth;
    codec_.height = inputImage._encodedHeight;
    int32_t ret = InitDecode(&codec_, 1);
    if (ret < 0) {
      ALOGE("InitDecode failure: %d - fallback to SW codec", ret);
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
  }

  // Always start with a complete key frame.
  if (key_frame_required_) {
    if (inputImage._frameType != webrtc::kKeyFrame) {
      ALOGE("Decode() - key frame is required");
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (!inputImage._completeFrame) {
      ALOGE("Decode() - complete frame is required");
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    key_frame_required_ = false;
  }
  if (inputImage._length == 0) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
}

int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
    const EncodedImage& inputImage) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // Try to drain the decoder and wait until output is not too
  // much behind the input.
  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
    ALOGV("Received: %d. Decoded: %d. Wait for output...",
        frames_received_, frames_decoded_);
    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
      ALOGE("DeliverPendingOutputs error");
      return ProcessHWErrorOnCodecThread();
    }
    if (frames_received_ > frames_decoded_ + max_pending_frames_) {
      ALOGE("Output buffer dequeue timeout");
      return ProcessHWErrorOnCodecThread();
    }
  }

  // Get input buffer.
  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
                                                j_dequeue_input_buffer_method_);
  if (CheckException(jni) || j_input_buffer_index < 0) {
    ALOGE("dequeueInputBuffer error");
    return ProcessHWErrorOnCodecThread();
  }

  // Copy encoded data to Java ByteBuffer.
  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8* buffer =
      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK(buffer) << "Indirect buffer??";
  int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
  if (CheckException(jni) || buffer_capacity < inputImage._length) {
    ALOGE("Input frame size %d is bigger than buffer size %d.",
        inputImage._length, buffer_capacity);
    return ProcessHWErrorOnCodecThread();
  }
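  // MediaCodec expects a presentation timestamp with each input buffer; one is
  // derived here from the frame counter and the nominal frame rate. The real
  // RTP and NTP timestamps are queued below and reattached to the matching
  // output frame in DeliverPendingOutputs().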
  jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
  ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d",
      frames_received_, inputImage._frameType, j_input_buffer_index,
      timestamp_us / 1000, inputImage._length);
  memcpy(buffer, inputImage._buffer, inputImage._length);

  // Save input image timestamps for later output.
  frames_received_++;
  current_bytes_ += inputImage._length;
  timestamps_.push_back(inputImage._timeStamp);
  ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());

  // Feed input to decoder.
  bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
                                        j_queue_input_buffer_method_,
                                        j_input_buffer_index,
                                        inputImage._length,
                                        timestamp_us);
  if (CheckException(jni) || !success) {
    ALOGE("queueInputBuffer error");
    return ProcessHWErrorOnCodecThread();
  }

  // Try to drain the decoder
  if (!DeliverPendingOutputs(jni, 0)) {
    ALOGE("DeliverPendingOutputs error");
    return ProcessHWErrorOnCodecThread();
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoDecoder::DeliverPendingOutputs(
    JNIEnv* jni, int dequeue_timeout_us) {
  if (frames_received_ <= frames_decoded_) {
    // No need to query for output buffers - decoder is drained.
    return true;
  }
  // Get decoder output.
  jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
      *j_media_codec_video_decoder_,
      j_dequeue_output_buffer_method_,
      dequeue_timeout_us);
  if (CheckException(jni)) {
    return false;
  }
  if (IsNull(jni, j_decoder_output_buffer_info)) {
    return true;
  }

  // Extract output buffer info from Java DecoderOutputBufferInfo.
  int output_buffer_index =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
  if (output_buffer_index < 0) {
    ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
    return false;
  }
  int output_buffer_offset =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
  int output_buffer_size =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
  long output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer_info,
      j_info_presentation_timestamp_us_field_) / 1000;
  if (CheckException(jni)) {
    return false;
  }

  // Get decoded video frame properties.
  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
      j_color_format_field_);
  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
      j_slice_height_field_);
  int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
      j_textureID_field_);

  // Extract data from Java ByteBuffer and create output yuv420 frame -
  // for non surface decoding only.
  if (!use_surface_) {
    if (output_buffer_size < width * height * 3 / 2) {
      ALOGE("Insufficient output buffer size: %d", output_buffer_size);
      return false;
    }
    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
    jobject output_buffer =
        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
        output_buffer));
    if (CheckException(jni)) {
      return false;
    }
    payload += output_buffer_offset;

    // Create yuv420 frame.
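    // For COLOR_FormatYUV420Planar the output buffer already holds contiguous
    // I420 planes: Y at offset 0, U at stride * slice_height, and V a quarter
    // plane after U, so the frame can be wrapped without pixel conversion.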
    if (color_format == COLOR_FormatYUV420Planar) {
      decoded_image_.CreateFrame(
          payload,
          payload + (stride * slice_height),
          payload + (5 * stride * slice_height / 4),
          width, height,
          stride, stride / 2, stride / 2);
    } else {
      // All other supported formats are nv12.
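      // NV12 keeps a full Y plane followed by one interleaved UV plane, so
      // libyuv::NV12ToI420 is used to split the chroma into the separate U
      // and V planes of |decoded_image_|.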
      decoded_image_.CreateEmptyFrame(width, height, width,
                                      width / 2, width / 2);
      libyuv::NV12ToI420(
          payload, stride,
          payload + stride * slice_height, stride,
          decoded_image_.buffer(webrtc::kYPlane),
          decoded_image_.stride(webrtc::kYPlane),
          decoded_image_.buffer(webrtc::kUPlane),
          decoded_image_.stride(webrtc::kUPlane),
          decoded_image_.buffer(webrtc::kVPlane),
          decoded_image_.stride(webrtc::kVPlane),
          width, height);
    }
  }

  // Get frame timestamps from a queue.
  if (timestamps_.size() > 0) {
    output_timestamp_ = timestamps_.front();
    timestamps_.erase(timestamps_.begin());
  }
  if (ntp_times_ms_.size() > 0) {
    output_ntp_time_ms_ = ntp_times_ms_.front();
    ntp_times_ms_.erase(ntp_times_ms_.begin());
  }
  int64_t frame_decoding_time_ms = 0;
  if (frame_rtc_times_ms_.size() > 0) {
    frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
  }
  ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
      " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
      color_format, output_timestamps_ms, frame_decoding_time_ms);

  // Return output buffer back to codec.
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_release_output_buffer_method_,
      output_buffer_index,
      use_surface_);
  if (CheckException(jni) || !success) {
    ALOGE("releaseOutputBuffer error");
    return false;
  }

  // Calculate and print decoding statistics - every 3 seconds.
  frames_decoded_++;
  current_frames_++;
  current_decoding_time_ms_ += frame_decoding_time_ms;
  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
      current_frames_ > 0) {
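    // current_bytes_ * 8 / elapsed ms equals kbits/s; the fps term adds half
    // the interval before dividing so the result rounds to the nearest integer.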
    ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
        current_bytes_ * 8 / statistic_time_ms,
        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
        current_decoding_time_ms_ / current_frames_, statistic_time_ms);
    start_time_ms_ = GetCurrentTimeMs();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_decoding_time_ms_ = 0;
  }

  // Callback - output decoded frame.
  int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
  if (use_surface_) {
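    // Surface (texture) decoding: the pixels stay in the codec-owned texture,
    // so only a native handle wrapping |surface_texture_| and the texture id
    // is delivered to the callback.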
    native_handle_.SetTextureObject(surface_texture_, texture_id);
    VideoFrame texture_image(new rtc::RefCountedObject<JniNativeHandleBuffer>(
                                 &native_handle_, width, height),
                             output_timestamp_, 0, webrtc::kVideoRotation_0);
    texture_image.set_ntp_time_ms(output_ntp_time_ms_);
    callback_status = callback_->Decoded(texture_image);
  } else {
    decoded_image_.set_timestamp(output_timestamp_);
    decoded_image_.set_ntp_time_ms(output_ntp_time_ms_);
    callback_status = callback_->Decoded(decoded_image_);
  }
  if (callback_status > 0) {
    ALOGE("callback error");
  }

  return true;
}

int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoDecoder::Reset() {
  ALOGD("DecoderReset");
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return InitDecode(&codec_, 1);
}

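// Periodic poll message posted from InitDecodeOnCodecThread() and re-posted
// below: delivers any pending decoder output while the codec is running.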
void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (!inited_) {
    return;
  }
  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();

  if (!DeliverPendingOutputs(jni, 0)) {
    ALOGE("OnMessage: DeliverPendingOutputs error");
    ProcessHWErrorOnCodecThread();
    return;
  }
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}

int MediaCodecVideoDecoderFactory::SetAndroidObjects(JNIEnv* jni,
    jobject render_egl_context) {
  ALOGD("SetAndroidObjects for surface decoding.");
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
  }
  if (IsNull(jni, render_egl_context)) {
    render_egl_context_ = NULL;
  } else {
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    if (CheckException(jni)) {
      ALOGE("error calling NewGlobalRef for EGL Context.");
      render_egl_context_ = NULL;
    } else {
      jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
      if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
        ALOGE("Wrong EGL Context.");
        jni->DeleteGlobalRef(render_egl_context_);
        render_egl_context_ = NULL;
      }
    }
  }
  if (render_egl_context_ == NULL) {
    ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
  return 0;
}

MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
  supported_codec_types_.clear();

  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
  if (CheckException(jni)) {
    is_vp8_hw_supported = false;
  }
  if (is_vp8_hw_supported) {
    ALOGD("VP8 HW Decoder supported.");
    supported_codec_types_.push_back(kVideoCodecVP8);
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
  if (CheckException(jni)) {
    is_h264_hw_supported = false;
  }
  if (is_h264_hw_supported) {
    ALOGD("H264 HW Decoder supported.");
    supported_codec_types_.push_back(kVideoCodecH264);
  }
}

MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}

webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
    VideoCodecType type) {
  if (supported_codec_types_.empty()) {
    return NULL;
  }
  for (std::vector<VideoCodecType>::const_iterator it =
      supported_codec_types_.begin(); it != supported_codec_types_.end();
      ++it) {
    if (*it == type) {
      ALOGD("Create HW video decoder for type %d.", (int)type);
      return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type);
    }
  }
  return NULL;
}

void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  delete decoder;
}

}  // namespace webrtc_jni