/*
2 * libjingle
3 * Copyright 2015 Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 *
27 */
28
29#include <vector>
30
31#include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
32#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
33#include "talk/app/webrtc/java/jni/classreferenceholder.h"
34#include "talk/app/webrtc/java/jni/native_handle_impl.h"
35#include "webrtc/base/bind.h"
36#include "webrtc/base/checks.h"
37#include "webrtc/base/logging.h"
38#include "webrtc/base/thread.h"
39#include "webrtc/common_video/interface/texture_video_frame.h"
40#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
41#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
42#include "webrtc/system_wrappers/interface/tick_util.h"
43#include "third_party/libyuv/include/libyuv/convert.h"
44#include "third_party/libyuv/include/libyuv/convert_from.h"
45#include "third_party/libyuv/include/libyuv/video_common.h"
46
47using rtc::Bind;
48using rtc::Thread;
49using rtc::ThreadManager;
50using rtc::scoped_ptr;
51
52using webrtc::CodecSpecificInfo;
53using webrtc::DecodedImageCallback;
54using webrtc::EncodedImage;
55using webrtc::I420VideoFrame;
56using webrtc::RTPFragmentationHeader;
57using webrtc::TextureVideoFrame;
58using webrtc::TickTime;
59using webrtc::VideoCodec;
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +000060using webrtc::VideoCodecType;
61using webrtc::kVideoCodecH264;
glaznev@webrtc.org18c92472015-02-18 18:42:55 +000062using webrtc::kVideoCodecVP8;
63
64namespace webrtc_jni {
65
// Shared EGL render context used by all decoder instances for surface
// (texture) decoding; set via SetAndroidObjects().  NULL means HW surface
// decoding is disabled and decoders fall back to byte-buffer output.
jobject MediaCodecVideoDecoderFactory::render_egl_context_ = NULL;
67
// JNI wrapper around the Java org.webrtc.MediaCodecVideoDecoder peer.
// All MediaCodec interaction runs on a dedicated |codec_thread_|; the public
// webrtc::VideoDecoder entry points marshal onto it with Invoke().
class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(JNIEnv* jni, VideoCodecType codecType);
  virtual ~MediaCodecVideoDecoder();

  static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);

  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
      override;

  int32_t Decode(
      const EncodedImage& inputImage, bool missingFrames,
      const RTPFragmentationHeader* fragmentation,
      const CodecSpecificInfo* codecSpecificInfo = NULL,
      int64_t renderTimeMs = -1) override;

  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
      override;

  int32_t Release() override;

  int32_t Reset() override;
  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  // The *OnCodecThread() counterparts below must only run on |codec_thread_|.
  int32_t InitDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);

  // Type of video codec.
  VideoCodecType codecType_;

  bool key_frame_required_;  // Next accepted input must be a complete keyframe.
  bool inited_;  // True between successful init and release on codec thread.
  bool use_surface_;  // Decode to SurfaceTexture instead of byte buffers.
  int error_count_;  // Critical errors seen; >1 switches to SW codec in Java.
  VideoCodec codec_;  // Cached settings, reused by Reset().
  I420VideoFrame decoded_image_;  // Scratch frame for byte-buffer output path.
  NativeHandleImpl native_handle_;  // Texture handle for surface output path.
  DecodedImageCallback* callback_;  // Decoded-frame sink; not owned.
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_decoding_time_ms_;  // Overall decoding time in the current second
  uint32_t max_pending_frames_;  // Maximum number of pending input frames
  std::vector<int32_t> timestamps_;
  std::vector<int64_t> ntp_times_ms_;
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // decoder input.
  int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q.
  int64_t output_ntp_time_ms_;  // Last output frame ntp time from
                                // ntp_times_ms_ queue.

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  jmethodID j_init_decode_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  jfieldID j_surface_texture_field_;
  jfieldID j_textureID_field_;
  // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // Global references; input_buffers_ is deleted in ReleaseOnCodecThread(),
  // the surface textures in the destructor.
  std::vector<jobject> input_buffers_;
  jobject surface_texture_;
  jobject previous_surface_texture_;
};
163
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000164MediaCodecVideoDecoder::MediaCodecVideoDecoder(
165 JNIEnv* jni, VideoCodecType codecType) :
166 codecType_(codecType),
167 key_frame_required_(true),
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000168 inited_(false),
169 error_count_(0),
170 surface_texture_(NULL),
171 previous_surface_texture_(NULL),
172 codec_thread_(new Thread()),
173 j_media_codec_video_decoder_class_(
174 jni,
175 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
176 j_media_codec_video_decoder_(
177 jni,
178 jni->NewObject(*j_media_codec_video_decoder_class_,
179 GetMethodID(jni,
180 *j_media_codec_video_decoder_class_,
181 "<init>",
182 "()V"))) {
183 ScopedLocalRefFrame local_ref_frame(jni);
184 codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
185 CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
186
187 j_init_decode_method_ = GetMethodID(
188 jni, *j_media_codec_video_decoder_class_, "initDecode",
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000189 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
190 "IIZZLandroid/opengl/EGLContext;)Z");
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000191 j_release_method_ =
192 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
193 j_dequeue_input_buffer_method_ = GetMethodID(
194 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
195 j_queue_input_buffer_method_ = GetMethodID(
196 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
197 j_dequeue_output_buffer_method_ = GetMethodID(
198 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
199 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
200 j_release_output_buffer_method_ = GetMethodID(
201 jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");
202
203 j_input_buffers_field_ = GetFieldID(
204 jni, *j_media_codec_video_decoder_class_,
205 "inputBuffers", "[Ljava/nio/ByteBuffer;");
206 j_output_buffers_field_ = GetFieldID(
207 jni, *j_media_codec_video_decoder_class_,
208 "outputBuffers", "[Ljava/nio/ByteBuffer;");
209 j_color_format_field_ = GetFieldID(
210 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
211 j_width_field_ = GetFieldID(
212 jni, *j_media_codec_video_decoder_class_, "width", "I");
213 j_height_field_ = GetFieldID(
214 jni, *j_media_codec_video_decoder_class_, "height", "I");
215 j_stride_field_ = GetFieldID(
216 jni, *j_media_codec_video_decoder_class_, "stride", "I");
217 j_slice_height_field_ = GetFieldID(
218 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
219 j_textureID_field_ = GetFieldID(
220 jni, *j_media_codec_video_decoder_class_, "textureID", "I");
221 j_surface_texture_field_ = GetFieldID(
222 jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
223 "Landroid/graphics/SurfaceTexture;");
224
225 jclass j_decoder_output_buffer_info_class = FindClass(jni,
226 "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
227 j_info_index_field_ = GetFieldID(
228 jni, j_decoder_output_buffer_info_class, "index", "I");
229 j_info_offset_field_ = GetFieldID(
230 jni, j_decoder_output_buffer_info_class, "offset", "I");
231 j_info_size_field_ = GetFieldID(
232 jni, j_decoder_output_buffer_info_class, "size", "I");
233 j_info_presentation_timestamp_us_field_ = GetFieldID(
234 jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
235
236 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
237 use_surface_ = true;
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000238 if (MediaCodecVideoDecoderFactory::render_egl_context_ == NULL) {
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000239 use_surface_ = false;
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000240 }
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000241 memset(&codec_, 0, sizeof(codec_));
242 AllowBlockingCalls();
243}
244
// Releases the codec and drops the surface-texture global refs.
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
  // Delete global references.
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  if (previous_surface_texture_ != NULL) {
    jni->DeleteGlobalRef(previous_surface_texture_);
  }
  if (surface_texture_ != NULL) {
    jni->DeleteGlobalRef(surface_texture_);
  }
}
257
// webrtc::VideoDecoder entry point.  Validates |inst|, releases any previous
// codec instance, caches the settings and runs the real init on the codec
// thread.  Returns a WEBRTC_VIDEO_CODEC_* status.
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
    int32_t numberOfCores) {
  if (inst == NULL) {
    ALOGE("NULL VideoCodec instance");
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  CHECK(inst->codecType == codecType_) << "Unsupported codec " <<
      inst->codecType << " for " << codecType_;

  // Re-init implies a full release of the previous codec state first.
  int ret_val = Release();
  if (ret_val < 0) {
    return ret_val;
  }
  // Save VideoCodec instance for later.
  if (&codec_ != inst) {
    codec_ = *inst;
  }
  // Guard against divide-by-zero when deriving input timestamps.
  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;

  // Always start with a complete key frame.
  key_frame_required_ = true;
  frames_received_ = 0;
  frames_decoded_ = 0;

  // Call Java init.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
}
287
// Runs on |codec_thread_|.  Calls the Java peer's initDecode(), resets all
// per-session statistics/queues, grabs global refs to the codec's input
// ByteBuffers and (for surface decoding) the SurfaceTexture, and schedules
// the periodic output poll message.
int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d. Errors: %d",
      (int)codecType_, codec_.width, codec_.height,
      codec_.maxFramerate, error_count_);
  bool use_sw_codec = false;
  if (error_count_ > 1) {
    // If more than one critical errors happen for HW codec, switch to SW codec.
    use_sw_codec = true;
  }

  jobject j_video_codec_enum = JavaEnumFromIndex(
      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_init_decode_method_,
      j_video_codec_enum,
      codec_.width,
      codec_.height,
      use_sw_codec,
      use_surface_,
      MediaCodecVideoDecoderFactory::render_egl_context_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

  // Allowed decoder pipeline depth depends on the codec.
  switch (codecType_) {
    case kVideoCodecVP8:
      max_pending_frames_ = kMaxPendingFramesVp8;
      break;
    case kVideoCodecH264:
      max_pending_frames_ = kMaxPendingFramesH264;
      break;
    default:
      max_pending_frames_ = 0;
  }
  // Reset statistics and timestamp bookkeeping for the new session.
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;
  output_timestamp_ = 0;
  output_ntp_time_ms_ = 0;
  timestamps_.clear();
  ntp_times_ms_.clear();
  frame_rtc_times_ms_.clear();

  // Keep global refs to the input ByteBuffers so they survive local frames.
  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    CHECK_EXCEPTION(jni);
  }

  if (use_surface_) {
    jobject surface_texture = GetObjectField(
        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
    // Keep the previous texture alive: a renderer may still hold frames on it.
    if (previous_surface_texture_ != NULL) {
      jni->DeleteGlobalRef(previous_surface_texture_);
    }
    previous_surface_texture_ = surface_texture_;
    surface_texture_ = jni->NewGlobalRef(surface_texture);
  }
  // Start the periodic output-drain poll (see OnMessage()).
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}
361
362int32_t MediaCodecVideoDecoder::Release() {
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000363 ALOGD("DecoderRelease request");
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000364 return codec_thread_->Invoke<int32_t>(
365 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
366}
367
// Runs on |codec_thread_|.  Drops the input-buffer global refs, releases the
// Java codec and cancels the pending poll messages.  No-op if not inited.
int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_);
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); i++) {
    jni->DeleteGlobalRef(input_buffers_[i]);
  }
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  // Remove the delayed poll message posted by InitDecodeOnCodecThread().
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}
386
387void MediaCodecVideoDecoder::CheckOnCodecThread() {
388 CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
389 << "Running on wrong thread!";
390}
391
// webrtc::VideoDecoder entry point.  Validates the encoded frame, re-inits on
// a resolution change, enforces the keyframe-first rule, then hands the frame
// to DecodeOnCodecThread() on the codec thread.
int32_t MediaCodecVideoDecoder::Decode(
    const EncodedImage& inputImage,
    bool missingFrames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codecSpecificInfo,
    int64_t renderTimeMs) {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (callback_ == NULL) {
    // No sink registered yet - nowhere to deliver decoded frames.
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (inputImage._buffer == NULL && inputImage._length > 0) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Check if encoded frame dimension has changed.
  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
      (inputImage._encodedWidth != codec_.width ||
      inputImage._encodedHeight != codec_.height)) {
    codec_.width = inputImage._encodedWidth;
    codec_.height = inputImage._encodedHeight;
    // Restart the codec with the new dimensions (also resets
    // key_frame_required_).
    InitDecode(&codec_, 1);
  }

  // Always start with a complete key frame.
  if (key_frame_required_) {
    if (inputImage._frameType != webrtc::kKeyFrame) {
      ALOGE("Key frame is required");
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (!inputImage._completeFrame) {
      ALOGE("Complete frame is required");
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    key_frame_required_ = false;
  }
  if (inputImage._length == 0) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
}
435
436int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
437 const EncodedImage& inputImage) {
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000438 CheckOnCodecThread();
439 JNIEnv* jni = AttachCurrentThreadIfNeeded();
440 ScopedLocalRefFrame local_ref_frame(jni);
441
442 // Try to drain the decoder and wait until output is not too
443 // much behind the input.
444 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
glaznev@webrtc.orga4623d22015-02-25 00:02:50 +0000445 ALOGV("Received: %d. Decoded: %d. Wait for output...",
446 frames_received_, frames_decoded_);
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000447 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
448 error_count_++;
449 Reset();
450 return WEBRTC_VIDEO_CODEC_ERROR;
451 }
452 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
453 ALOGE("Output buffer dequeue timeout");
454 error_count_++;
455 Reset();
456 return WEBRTC_VIDEO_CODEC_ERROR;
457 }
458 }
459
460 // Get input buffer.
461 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
462 j_dequeue_input_buffer_method_);
463 CHECK_EXCEPTION(jni);
464 if (j_input_buffer_index < 0) {
465 ALOGE("dequeueInputBuffer error");
466 error_count_++;
467 Reset();
468 return WEBRTC_VIDEO_CODEC_ERROR;
469 }
470
471 // Copy encoded data to Java ByteBuffer.
472 jobject j_input_buffer = input_buffers_[j_input_buffer_index];
473 uint8* buffer =
474 reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
475 CHECK(buffer) << "Indirect buffer??";
476 int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
477 CHECK_EXCEPTION(jni);
478 if (buffer_capacity < inputImage._length) {
479 ALOGE("Input frame size %d is bigger than buffer size %d.",
480 inputImage._length, buffer_capacity);
481 error_count_++;
482 Reset();
483 return WEBRTC_VIDEO_CODEC_ERROR;
484 }
glaznev@webrtc.orga4623d22015-02-25 00:02:50 +0000485 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
486 ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d",
487 frames_received_, inputImage._frameType, j_input_buffer_index,
488 timestamp_us / 1000, inputImage._length);
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000489 memcpy(buffer, inputImage._buffer, inputImage._length);
490
491 // Save input image timestamps for later output.
492 frames_received_++;
493 current_bytes_ += inputImage._length;
494 timestamps_.push_back(inputImage._timeStamp);
495 ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
496 frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
497
498 // Feed input to decoder.
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000499 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
500 j_queue_input_buffer_method_,
501 j_input_buffer_index,
502 inputImage._length,
503 timestamp_us);
504 CHECK_EXCEPTION(jni);
505 if (!success) {
506 ALOGE("queueInputBuffer error");
507 error_count_++;
508 Reset();
509 return WEBRTC_VIDEO_CODEC_ERROR;
510 }
511
512 // Try to drain the decoder
513 if (!DeliverPendingOutputs(jni, 0)) {
514 ALOGE("DeliverPendingOutputs error");
515 error_count_++;
516 Reset();
517 return WEBRTC_VIDEO_CODEC_ERROR;
518 }
519
520 return WEBRTC_VIDEO_CODEC_OK;
521}
522
// Runs on |codec_thread_|.  Dequeues at most one decoded frame from the Java
// MediaCodec (waiting up to |dequeue_timeout_us|), converts or wraps it, and
// delivers it to |callback_|.  Returns false only on a fatal decoder error;
// "no output ready yet" is a successful no-op.
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
    JNIEnv* jni, int dequeue_timeout_us) {
  if (frames_received_ <= frames_decoded_) {
    // No need to query for output buffers - decoder is drained.
    return true;
  }
  // Get decoder output.
  jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
      *j_media_codec_video_decoder_,
      j_dequeue_output_buffer_method_,
      dequeue_timeout_us);

  CHECK_EXCEPTION(jni);
  if (IsNull(jni, j_decoder_output_buffer_info)) {
    // Nothing decoded yet - not an error.
    return true;
  }

  // Extract output buffer info from Java DecoderOutputBufferInfo.
  int output_buffer_index =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
  if (output_buffer_index < 0) {
    ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
    return false;
  }
  int output_buffer_offset =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
  int output_buffer_size =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
  long output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer_info,
      j_info_presentation_timestamp_us_field_) / 1000;

  CHECK_EXCEPTION(jni);

  // Get decoded video frame properties.
  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
      j_color_format_field_);
  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
      j_slice_height_field_);
  int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
      j_textureID_field_);

  // Extract data from Java ByteBuffer and create output yuv420 frame -
  // for non surface decoding only.
  if (!use_surface_) {
    if (output_buffer_size < width * height * 3 / 2) {
      ALOGE("Insufficient output buffer size: %d", output_buffer_size);
      return false;
    }
    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
    jobject output_buffer =
        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
        output_buffer));
    CHECK_EXCEPTION(jni);
    payload += output_buffer_offset;

    // Create yuv420 frame.
    if (color_format == COLOR_FormatYUV420Planar) {
      // Planar I420: Y plane, then U, then V, laid out with the reported
      // stride/slice_height geometry.
      decoded_image_.CreateFrame(
          stride * slice_height, payload,
          (stride * slice_height) / 4, payload + (stride * slice_height),
          (stride * slice_height) / 4,
          payload + (5 * stride * slice_height / 4),
          width, height,
          stride, stride / 2, stride / 2);
    } else {
      // All other supported formats are nv12.
      decoded_image_.CreateEmptyFrame(width, height, width,
          width / 2, width / 2);
      libyuv::NV12ToI420(
          payload, stride,
          payload + stride * slice_height, stride,
          decoded_image_.buffer(webrtc::kYPlane),
          decoded_image_.stride(webrtc::kYPlane),
          decoded_image_.buffer(webrtc::kUPlane),
          decoded_image_.stride(webrtc::kUPlane),
          decoded_image_.buffer(webrtc::kVPlane),
          decoded_image_.stride(webrtc::kVPlane),
          width, height);
    }
  }

  // Get frame timestamps from a queue.
  // NOTE(review): this assumes the codec emits frames in queue order -
  // timestamps are matched FIFO, not by presentation timestamp.
  if (timestamps_.size() > 0) {
    output_timestamp_ = timestamps_.front();
    timestamps_.erase(timestamps_.begin());
  }
  if (ntp_times_ms_.size() > 0) {
    output_ntp_time_ms_ = ntp_times_ms_.front();
    ntp_times_ms_.erase(ntp_times_ms_.begin());
  }
  int64_t frame_decoding_time_ms = 0;
  if (frame_rtc_times_ms_.size() > 0) {
    frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
  }
  ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
      " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
      color_format, output_timestamps_ms, frame_decoding_time_ms);

  // Return output buffer back to codec.
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_release_output_buffer_method_,
      output_buffer_index,
      use_surface_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    ALOGE("releaseOutputBuffer error");
    return false;
  }

  // Calculate and print decoding statistics - every 3 seconds.
  frames_decoded_++;
  current_frames_++;
  current_decoding_time_ms_ += frame_decoding_time_ms;
  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
      current_frames_ > 0) {
    ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
        current_bytes_ * 8 / statistic_time_ms,
        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
        current_decoding_time_ms_ / current_frames_, statistic_time_ms);
    start_time_ms_ = GetCurrentTimeMs();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_decoding_time_ms_ = 0;
  }

  // Callback - output decoded frame.
  int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
  if (use_surface_) {
    native_handle_.SetTextureObject(surface_texture_, texture_id);
    TextureVideoFrame texture_image(
        &native_handle_, width, height, output_timestamp_, 0);
    texture_image.set_ntp_time_ms(output_ntp_time_ms_);
    callback_status = callback_->Decoded(texture_image);
  } else {
    decoded_image_.set_timestamp(output_timestamp_);
    decoded_image_.set_ntp_time_ms(output_ntp_time_ms_);
    callback_status = callback_->Decoded(decoded_image_);
  }
  // A failing callback is logged but deliberately not treated as fatal.
  if (callback_status > 0) {
    ALOGE("callback error");
  }

  return true;
}
675
676int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
677 DecodedImageCallback* callback) {
678 callback_ = callback;
679 return WEBRTC_VIDEO_CODEC_OK;
680}
681
682int32_t MediaCodecVideoDecoder::Reset() {
683 ALOGD("DecoderReset");
684 if (!inited_) {
685 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
686 }
687 return InitDecode(&codec_, 1);
688}
689
// Periodic poll on |codec_thread_|: drains pending decoder outputs and
// re-posts itself every kMediaCodecPollMs.  Scheduled by
// InitDecodeOnCodecThread() and cancelled in ReleaseOnCodecThread().
void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (!inited_) {
    return;
  }
  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();

  if (!DeliverPendingOutputs(jni, 0)) {
    error_count_++;
    Reset();
  }
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
708
// Installs (or clears) the global EGL render context used for HW surface
// decoding.  Validates that |render_egl_context| really is an
// android.opengl.EGLContext; on any failure the context is left NULL and
// surface decoding stays disabled.  Always returns 0.
int MediaCodecVideoDecoderFactory::SetAndroidObjects(JNIEnv* jni,
    jobject render_egl_context) {
  ALOGD("SetAndroidObjects for surface decoding.");
  // Drop any previously installed context before replacing it.
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
  }
  if (IsNull(jni, render_egl_context)) {
    render_egl_context_ = NULL;
  } else {
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    CHECK_EXCEPTION(jni) << "error calling NewGlobalRef for EGL Context.";
    jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
    if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
      ALOGE("Wrong EGL Context.");
      jni->DeleteGlobalRef(render_egl_context_);
      render_egl_context_ = NULL;
    }
  }
  if (render_egl_context_ == NULL) {
    ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
  return 0;
}
732
// Probes the Java MediaCodecVideoDecoder class for HW VP8/H264 decoder
// support and records the supported codec types for CreateVideoDecoder().
MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
  supported_codec_types_.clear();

  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp8_hw_supported) {
    ALOGD("VP8 HW Decoder supported.");
    supported_codec_types_.push_back(kVideoCodecVP8);
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_h264_hw_supported) {
    ALOGD("H264 HW Decoder supported.");
    supported_codec_types_.push_back(kVideoCodecH264);
  }
}
757
// Nothing to release; supported_codec_types_ cleans itself up.
MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}
759
760webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000761 VideoCodecType type) {
762 if (supported_codec_types_.empty()) {
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000763 return NULL;
764 }
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000765 for (std::vector<VideoCodecType>::const_iterator it =
766 supported_codec_types_.begin(); it != supported_codec_types_.end();
767 ++it) {
768 if (*it == type) {
769 ALOGD("Create HW video decoder for type %d.", (int)type);
770 return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type);
771 }
772 }
773 return NULL;
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000774}
775
// Takes back ownership of a decoder handed out by CreateVideoDecoder().
void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  delete decoder;
}
780
781} // namespace webrtc_jni
782