blob: 2c2c12d34447071fd0641db39150820454cd9066 [file] [log] [blame]
glaznev@webrtc.org18c92472015-02-18 18:42:55 +00001/*
2 * libjingle
3 * Copyright 2015 Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 *
27 */
28
29#include <vector>
30
31#include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
32#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
33#include "talk/app/webrtc/java/jni/classreferenceholder.h"
34#include "talk/app/webrtc/java/jni/native_handle_impl.h"
35#include "webrtc/base/bind.h"
36#include "webrtc/base/checks.h"
37#include "webrtc/base/logging.h"
38#include "webrtc/base/thread.h"
glaznev@webrtc.org18c92472015-02-18 18:42:55 +000039#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
40#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
41#include "webrtc/system_wrappers/interface/tick_util.h"
42#include "third_party/libyuv/include/libyuv/convert.h"
43#include "third_party/libyuv/include/libyuv/convert_from.h"
44#include "third_party/libyuv/include/libyuv/video_common.h"
45
46using rtc::Bind;
47using rtc::Thread;
48using rtc::ThreadManager;
49using rtc::scoped_ptr;
50
51using webrtc::CodecSpecificInfo;
52using webrtc::DecodedImageCallback;
53using webrtc::EncodedImage;
54using webrtc::I420VideoFrame;
55using webrtc::RTPFragmentationHeader;
glaznev@webrtc.org18c92472015-02-18 18:42:55 +000056using webrtc::TickTime;
57using webrtc::VideoCodec;
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +000058using webrtc::VideoCodecType;
59using webrtc::kVideoCodecH264;
glaznev@webrtc.org18c92472015-02-18 18:42:55 +000060using webrtc::kVideoCodecVP8;
61
62namespace webrtc_jni {
63
// Global EGL context used to enable surface (texture) decoding; set via
// MediaCodecVideoDecoderFactory::SetAndroidObjects(). NULL disables surfaces.
jobject MediaCodecVideoDecoderFactory::render_egl_context_ = NULL;
65
// Hardware video decoder that wraps the Java org.webrtc.MediaCodecVideoDecoder
// class over JNI. All MediaCodec interaction happens on the dedicated
// |codec_thread_|; public entry points marshal onto it via codec_thread_->
// Invoke(). Also a MessageHandler so it can poll for decoder output on a
// delayed message (see OnMessage()).
class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(JNIEnv* jni, VideoCodecType codecType);
  virtual ~MediaCodecVideoDecoder();

  // Stores |render_egl_context| for surface decoding (see factory's static
  // member). Declared here; the definition visible in this file is the
  // factory's version.
  static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);

  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
      override;

  int32_t Decode(
      const EncodedImage& inputImage, bool missingFrames,
      const RTPFragmentationHeader* fragmentation,
      const CodecSpecificInfo* codecSpecificInfo = NULL,
      int64_t renderTimeMs = -1) override;

  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
      override;

  int32_t Release() override;

  int32_t Reset() override;
  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  // Codec-thread counterparts of the public entry points above.
  int32_t InitDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);

  // Type of video codec.
  VideoCodecType codecType_;

  bool key_frame_required_;   // Drop frames until a complete key frame arrives.
  bool inited_;               // True between successful InitDecode and Release.
  bool use_surface_;          // Decode to a SurfaceTexture instead of buffers.
  int error_count_;           // Critical HW errors; >1 triggers SW fallback.
  VideoCodec codec_;
  I420VideoFrame decoded_image_;
  NativeHandleImpl native_handle_;
  DecodedImageCallback* callback_;
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_decoding_time_ms_;  // Overall decoding time in the current second
  uint32_t max_pending_frames_;  // Maximum number of pending input frames
  // Parallel FIFO queues mapping each queued input frame to its timestamps;
  // popped in DeliverPendingOutputs() as frames come out.
  std::vector<int32_t> timestamps_;
  std::vector<int64_t> ntp_times_ms_;
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // decoder input.
  int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q.
  int64_t output_ntp_time_ms_;  // Last output frame ntp time from
                                // ntp_times_ms_ queue.

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  jmethodID j_init_decode_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  jfieldID j_surface_texture_field_;
  jfieldID j_textureID_field_;
  // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
  jobject surface_texture_;
  jobject previous_surface_texture_;
};
161
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000162MediaCodecVideoDecoder::MediaCodecVideoDecoder(
163 JNIEnv* jni, VideoCodecType codecType) :
164 codecType_(codecType),
165 key_frame_required_(true),
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000166 inited_(false),
167 error_count_(0),
168 surface_texture_(NULL),
169 previous_surface_texture_(NULL),
170 codec_thread_(new Thread()),
171 j_media_codec_video_decoder_class_(
172 jni,
173 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
174 j_media_codec_video_decoder_(
175 jni,
176 jni->NewObject(*j_media_codec_video_decoder_class_,
177 GetMethodID(jni,
178 *j_media_codec_video_decoder_class_,
179 "<init>",
180 "()V"))) {
181 ScopedLocalRefFrame local_ref_frame(jni);
182 codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
183 CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
184
185 j_init_decode_method_ = GetMethodID(
186 jni, *j_media_codec_video_decoder_class_, "initDecode",
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000187 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
188 "IIZZLandroid/opengl/EGLContext;)Z");
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000189 j_release_method_ =
190 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
191 j_dequeue_input_buffer_method_ = GetMethodID(
192 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
193 j_queue_input_buffer_method_ = GetMethodID(
194 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
195 j_dequeue_output_buffer_method_ = GetMethodID(
196 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
197 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
198 j_release_output_buffer_method_ = GetMethodID(
199 jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");
200
201 j_input_buffers_field_ = GetFieldID(
202 jni, *j_media_codec_video_decoder_class_,
203 "inputBuffers", "[Ljava/nio/ByteBuffer;");
204 j_output_buffers_field_ = GetFieldID(
205 jni, *j_media_codec_video_decoder_class_,
206 "outputBuffers", "[Ljava/nio/ByteBuffer;");
207 j_color_format_field_ = GetFieldID(
208 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
209 j_width_field_ = GetFieldID(
210 jni, *j_media_codec_video_decoder_class_, "width", "I");
211 j_height_field_ = GetFieldID(
212 jni, *j_media_codec_video_decoder_class_, "height", "I");
213 j_stride_field_ = GetFieldID(
214 jni, *j_media_codec_video_decoder_class_, "stride", "I");
215 j_slice_height_field_ = GetFieldID(
216 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
217 j_textureID_field_ = GetFieldID(
218 jni, *j_media_codec_video_decoder_class_, "textureID", "I");
219 j_surface_texture_field_ = GetFieldID(
220 jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
221 "Landroid/graphics/SurfaceTexture;");
222
223 jclass j_decoder_output_buffer_info_class = FindClass(jni,
224 "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
225 j_info_index_field_ = GetFieldID(
226 jni, j_decoder_output_buffer_info_class, "index", "I");
227 j_info_offset_field_ = GetFieldID(
228 jni, j_decoder_output_buffer_info_class, "offset", "I");
229 j_info_size_field_ = GetFieldID(
230 jni, j_decoder_output_buffer_info_class, "size", "I");
231 j_info_presentation_timestamp_us_field_ = GetFieldID(
232 jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
233
234 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
235 use_surface_ = true;
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000236 if (MediaCodecVideoDecoderFactory::render_egl_context_ == NULL) {
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000237 use_surface_ = false;
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000238 }
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000239 memset(&codec_, 0, sizeof(codec_));
240 AllowBlockingCalls();
241}
242
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
  // Delete global references. These are the surface-texture refs owned
  // directly by this object; input-buffer refs are dropped in
  // ReleaseOnCodecThread().
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  if (previous_surface_texture_ != NULL) {
    jni->DeleteGlobalRef(previous_surface_texture_);
  }
  if (surface_texture_ != NULL) {
    jni->DeleteGlobalRef(surface_texture_);
  }
}
255
// Validates |inst|, tears down any previous codec instance and (re)starts the
// decoder on |codec_thread_|. Returns a WEBRTC_VIDEO_CODEC_* status.
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
    int32_t numberOfCores) {
  if (inst == NULL) {
    ALOGE("NULL VideoCodec instance");
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  CHECK(inst->codecType == codecType_) << "Unsupported codec " <<
      inst->codecType << " for " << codecType_;

  // Release any existing codec first; negative return values are errors.
  int ret_val = Release();
  if (ret_val < 0) {
    return ret_val;
  }
  // Save VideoCodec instance for later. The self-comparison guard allows
  // callers (e.g. Reset()) to pass our own |codec_| back in.
  if (&codec_ != inst) {
    codec_ = *inst;
  }
  // Clamp to at least 1 fps; maxFramerate is used as a divisor later.
  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;

  // Always start with a complete key frame.
  key_frame_required_ = true;
  frames_received_ = 0;
  frames_decoded_ = 0;

  // Call Java init.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
}
285
// Codec-thread body of InitDecode(): calls Java initDecode(), resets all
// per-session statistics/queues, grabs global refs to the codec's input
// buffers and (for surface decoding) the SurfaceTexture, then starts the
// periodic output-polling message.
int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d. Errors: %d",
      (int)codecType_, codec_.width, codec_.height,
      codec_.maxFramerate, error_count_);
  bool use_sw_codec = false;
  if (error_count_ > 1) {
    // If more than one critical errors happen for HW codec, switch to SW codec.
    use_sw_codec = true;
  }

  jobject j_video_codec_enum = JavaEnumFromIndex(
      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_init_decode_method_,
      j_video_codec_enum,
      codec_.width,
      codec_.height,
      use_sw_codec,
      use_surface_,
      MediaCodecVideoDecoderFactory::render_egl_context_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

  // Codec-specific limit on how far input may run ahead of output.
  switch (codecType_) {
    case kVideoCodecVP8:
      max_pending_frames_ = kMaxPendingFramesVp8;
      break;
    case kVideoCodecH264:
      max_pending_frames_ = kMaxPendingFramesH264;
      break;
    default:
      max_pending_frames_ = 0;
  }
  // Reset statistics and timestamp queues for the new session.
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;
  output_timestamp_ = 0;
  output_ntp_time_ms_ = 0;
  timestamps_.clear();
  ntp_times_ms_.clear();
  frame_rtc_times_ms_.clear();

  // Pin the Java-side direct input buffers with global refs so their
  // addresses stay usable across JNI calls; released in
  // ReleaseOnCodecThread().
  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    CHECK_EXCEPTION(jni);
  }

  if (use_surface_) {
    jobject surface_texture = GetObjectField(
        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
    // Keep the previous texture alive one generation; frames referencing it
    // may still be in flight. The one before that can now be dropped.
    if (previous_surface_texture_ != NULL) {
      jni->DeleteGlobalRef(previous_surface_texture_);
    }
    previous_surface_texture_ = surface_texture_;
    surface_texture_ = jni->NewGlobalRef(surface_texture);
  }
  // Kick off periodic polling for decoder output (see OnMessage()).
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}
359
// Public release entry point; marshals the actual teardown onto
// |codec_thread_| and blocks until it completes.
int32_t MediaCodecVideoDecoder::Release() {
  ALOGD("DecoderRelease request");
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
}
365
// Codec-thread body of Release(): drops input-buffer global refs, releases
// the Java MediaCodec, and cancels the pending poll messages. Idempotent:
// no-op when not inited.
int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_);
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); i++) {
    jni->DeleteGlobalRef(input_buffers_[i]);
  }
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  // Remove any queued OnMessage() polls so they stop after release.
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}
384
// CHECK-fails unless the caller is running on |codec_thread_|.
void MediaCodecVideoDecoder::CheckOnCodecThread() {
  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}
389
390int32_t MediaCodecVideoDecoder::Decode(
391 const EncodedImage& inputImage,
392 bool missingFrames,
393 const RTPFragmentationHeader* fragmentation,
394 const CodecSpecificInfo* codecSpecificInfo,
395 int64_t renderTimeMs) {
396 if (!inited_) {
397 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
398 }
399 if (callback_ == NULL) {
400 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
401 }
402 if (inputImage._buffer == NULL && inputImage._length > 0) {
403 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
404 }
405 // Check if encoded frame dimension has changed.
406 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
407 (inputImage._encodedWidth != codec_.width ||
408 inputImage._encodedHeight != codec_.height)) {
409 codec_.width = inputImage._encodedWidth;
410 codec_.height = inputImage._encodedHeight;
411 InitDecode(&codec_, 1);
412 }
413
414 // Always start with a complete key frame.
415 if (key_frame_required_) {
416 if (inputImage._frameType != webrtc::kKeyFrame) {
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000417 ALOGE("Key frame is required");
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000418 return WEBRTC_VIDEO_CODEC_ERROR;
419 }
420 if (!inputImage._completeFrame) {
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000421 ALOGE("Complete frame is required");
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000422 return WEBRTC_VIDEO_CODEC_ERROR;
423 }
424 key_frame_required_ = false;
425 }
426 if (inputImage._length == 0) {
427 return WEBRTC_VIDEO_CODEC_ERROR;
428 }
429
430 return codec_thread_->Invoke<int32_t>(Bind(
431 &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
432}
433
434int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
435 const EncodedImage& inputImage) {
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000436 CheckOnCodecThread();
437 JNIEnv* jni = AttachCurrentThreadIfNeeded();
438 ScopedLocalRefFrame local_ref_frame(jni);
439
440 // Try to drain the decoder and wait until output is not too
441 // much behind the input.
442 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
glaznev@webrtc.orga4623d22015-02-25 00:02:50 +0000443 ALOGV("Received: %d. Decoded: %d. Wait for output...",
444 frames_received_, frames_decoded_);
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000445 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
446 error_count_++;
447 Reset();
448 return WEBRTC_VIDEO_CODEC_ERROR;
449 }
450 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
451 ALOGE("Output buffer dequeue timeout");
452 error_count_++;
453 Reset();
454 return WEBRTC_VIDEO_CODEC_ERROR;
455 }
456 }
457
458 // Get input buffer.
459 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
460 j_dequeue_input_buffer_method_);
461 CHECK_EXCEPTION(jni);
462 if (j_input_buffer_index < 0) {
463 ALOGE("dequeueInputBuffer error");
464 error_count_++;
465 Reset();
466 return WEBRTC_VIDEO_CODEC_ERROR;
467 }
468
469 // Copy encoded data to Java ByteBuffer.
470 jobject j_input_buffer = input_buffers_[j_input_buffer_index];
471 uint8* buffer =
472 reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
473 CHECK(buffer) << "Indirect buffer??";
474 int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
475 CHECK_EXCEPTION(jni);
476 if (buffer_capacity < inputImage._length) {
477 ALOGE("Input frame size %d is bigger than buffer size %d.",
478 inputImage._length, buffer_capacity);
479 error_count_++;
480 Reset();
481 return WEBRTC_VIDEO_CODEC_ERROR;
482 }
glaznev@webrtc.orga4623d22015-02-25 00:02:50 +0000483 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
484 ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d",
485 frames_received_, inputImage._frameType, j_input_buffer_index,
486 timestamp_us / 1000, inputImage._length);
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000487 memcpy(buffer, inputImage._buffer, inputImage._length);
488
489 // Save input image timestamps for later output.
490 frames_received_++;
491 current_bytes_ += inputImage._length;
492 timestamps_.push_back(inputImage._timeStamp);
493 ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
494 frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
495
496 // Feed input to decoder.
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000497 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
498 j_queue_input_buffer_method_,
499 j_input_buffer_index,
500 inputImage._length,
501 timestamp_us);
502 CHECK_EXCEPTION(jni);
503 if (!success) {
504 ALOGE("queueInputBuffer error");
505 error_count_++;
506 Reset();
507 return WEBRTC_VIDEO_CODEC_ERROR;
508 }
509
510 // Try to drain the decoder
511 if (!DeliverPendingOutputs(jni, 0)) {
512 ALOGE("DeliverPendingOutputs error");
513 error_count_++;
514 Reset();
515 return WEBRTC_VIDEO_CODEC_ERROR;
516 }
517
518 return WEBRTC_VIDEO_CODEC_OK;
519}
520
// Dequeues at most one decoded frame from the Java MediaCodec (waiting up to
// |dequeue_timeout_us|), converts it to an I420VideoFrame (buffer path) or a
// texture frame (surface path), updates decode statistics, and hands the
// frame to |callback_|. Returns false only on a critical decoder error;
// "no output ready yet" returns true.
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
    JNIEnv* jni, int dequeue_timeout_us) {
  if (frames_received_ <= frames_decoded_) {
    // No need to query for output buffers - decoder is drained.
    return true;
  }
  // Get decoder output.
  jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
      *j_media_codec_video_decoder_,
      j_dequeue_output_buffer_method_,
      dequeue_timeout_us);

  CHECK_EXCEPTION(jni);
  if (IsNull(jni, j_decoder_output_buffer_info)) {
    // No output available within the timeout; not an error.
    return true;
  }

  // Extract output buffer info from Java DecoderOutputBufferInfo.
  int output_buffer_index =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
  if (output_buffer_index < 0) {
    ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
    return false;
  }
  int output_buffer_offset =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
  int output_buffer_size =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
  long output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer_info,
      j_info_presentation_timestamp_us_field_) / 1000;

  CHECK_EXCEPTION(jni);

  // Get decoded video frame properties. These Java fields are refreshed by
  // the codec on format changes, so re-read them per frame.
  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
      j_color_format_field_);
  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
      j_slice_height_field_);
  int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
      j_textureID_field_);

  // Extract data from Java ByteBuffer and create output yuv420 frame -
  // for non surface decoding only.
  if (!use_surface_) {
    if (output_buffer_size < width * height * 3 / 2) {
      ALOGE("Insufficient output buffer size: %d", output_buffer_size);
      return false;
    }
    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
    jobject output_buffer =
        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
        output_buffer));
    CHECK_EXCEPTION(jni);
    payload += output_buffer_offset;

    // Create yuv420 frame.
    if (color_format == COLOR_FormatYUV420Planar) {
      // Planar layout: Y plane of stride*slice_height bytes, then U and V
      // quarter-size planes.
      decoded_image_.CreateFrame(
          payload,
          payload + (stride * slice_height),
          payload + (5 * stride * slice_height / 4),
          width, height,
          stride, stride / 2, stride / 2);
    } else {
      // All other supported formats are nv12.
      decoded_image_.CreateEmptyFrame(width, height, width,
          width / 2, width / 2);
      libyuv::NV12ToI420(
          payload, stride,
          payload + stride * slice_height, stride,
          decoded_image_.buffer(webrtc::kYPlane),
          decoded_image_.stride(webrtc::kYPlane),
          decoded_image_.buffer(webrtc::kUPlane),
          decoded_image_.stride(webrtc::kUPlane),
          decoded_image_.buffer(webrtc::kVPlane),
          decoded_image_.stride(webrtc::kVPlane),
          width, height);
    }
  }

  // Get frame timestamps from a queue. The queues were pushed in input
  // order by DecodeOnCodecThread(), so the fronts belong to this frame.
  if (timestamps_.size() > 0) {
    output_timestamp_ = timestamps_.front();
    timestamps_.erase(timestamps_.begin());
  }
  if (ntp_times_ms_.size() > 0) {
    output_ntp_time_ms_ = ntp_times_ms_.front();
    ntp_times_ms_.erase(ntp_times_ms_.begin());
  }
  int64_t frame_decoding_time_ms = 0;
  if (frame_rtc_times_ms_.size() > 0) {
    frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
  }
  ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
      " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
      color_format, output_timestamps_ms, frame_decoding_time_ms);

  // Return output buffer back to codec. For surface decoding the buffer is
  // rendered to the texture instead of being discarded.
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_release_output_buffer_method_,
      output_buffer_index,
      use_surface_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    ALOGE("releaseOutputBuffer error");
    return false;
  }

  // Calculate and print decoding statistics - every 3 seconds.
  frames_decoded_++;
  current_frames_++;
  current_decoding_time_ms_ += frame_decoding_time_ms;
  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
      current_frames_ > 0) {
    ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
        current_bytes_ * 8 / statistic_time_ms,
        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
        current_decoding_time_ms_ / current_frames_, statistic_time_ms);
    start_time_ms_ = GetCurrentTimeMs();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_decoding_time_ms_ = 0;
  }

  // Callback - output decoded frame.
  int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
  if (use_surface_) {
    // Surface path: deliver a texture-backed frame instead of pixel data.
    native_handle_.SetTextureObject(surface_texture_, texture_id);
    I420VideoFrame texture_image(&native_handle_, width, height,
                                 output_timestamp_, 0, webrtc::kVideoRotation_0,
                                 rtc::Callback0<void>());
    texture_image.set_ntp_time_ms(output_ntp_time_ms_);
    callback_status = callback_->Decoded(texture_image);
  } else {
    decoded_image_.set_timestamp(output_timestamp_);
    decoded_image_.set_ntp_time_ms(output_ntp_time_ms_);
    callback_status = callback_->Decoded(decoded_image_);
  }
  // A failing callback is logged but not treated as a decoder error.
  if (callback_status > 0) {
    ALOGE("callback error");
  }

  return true;
}
673
// Stores the sink that receives decoded frames from DeliverPendingOutputs().
// Must be called before Decode(); Decode() rejects a NULL callback.
int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
679
// Restarts the decoder with the last-known codec settings (used for error
// recovery and resolution changes).
int32_t MediaCodecVideoDecoder::Reset() {
  ALOGD("DecoderReset");
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return InitDecode(&codec_, 1);
}
687
// Periodic poll (posted by InitDecodeOnCodecThread() and re-posted here every
// kMediaCodecPollMs): drains any ready decoder output even when no new input
// arrives, resetting the codec on delivery errors.
void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (!inited_) {
    return;
  }
  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();

  if (!DeliverPendingOutputs(jni, 0)) {
    error_count_++;
    Reset();
  }
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
706
// Installs (or clears) the global EGL context used for HW surface decoding.
// Takes a new global ref on the passed context after validating it is an
// android.opengl.EGLContext; releases any previously held ref. A NULL or
// invalid context disables surface decoding. Always returns 0.
int MediaCodecVideoDecoderFactory::SetAndroidObjects(JNIEnv* jni,
    jobject render_egl_context) {
  ALOGD("SetAndroidObjects for surface decoding.");
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
  }
  if (IsNull(jni, render_egl_context)) {
    render_egl_context_ = NULL;
  } else {
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    CHECK_EXCEPTION(jni) << "error calling NewGlobalRef for EGL Context.";
    jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
    if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
      ALOGE("Wrong EGL Context.");
      jni->DeleteGlobalRef(render_egl_context_);
      render_egl_context_ = NULL;
    }
  }
  if (render_egl_context_ == NULL) {
    ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
  return 0;
}
730
// Probes the Java MediaCodecVideoDecoder class for HW decode support and
// records the supported codec types for CreateVideoDecoder().
MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
  supported_codec_types_.clear();

  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp8_hw_supported) {
    ALOGD("VP8 HW Decoder supported.");
    supported_codec_types_.push_back(kVideoCodecVP8);
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_h264_hw_supported) {
    ALOGD("H264 HW Decoder supported.");
    supported_codec_types_.push_back(kVideoCodecH264);
  }
}
755
// No owned resources beyond the member vector; nothing to release.
MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}
757
758webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000759 VideoCodecType type) {
760 if (supported_codec_types_.empty()) {
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000761 return NULL;
762 }
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000763 for (std::vector<VideoCodecType>::const_iterator it =
764 supported_codec_types_.begin(); it != supported_codec_types_.end();
765 ++it) {
766 if (*it == type) {
767 ALOGD("Create HW video decoder for type %d.", (int)type);
768 return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type);
769 }
770 }
771 return NULL;
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000772}
773
// Destroys a decoder previously returned by CreateVideoDecoder().
void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  delete decoder;
}
778
779} // namespace webrtc_jni
780