/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include <vector>

#include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/interface/i420_buffer_pool.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
using rtc::scoped_ptr;

using webrtc::CodecSpecificInfo;
using webrtc::DecodedImageCallback;
using webrtc::EncodedImage;
using webrtc::VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::TickTime;
using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;

namespace webrtc_jni {

class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(
      JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
  virtual ~MediaCodecVideoDecoder();

  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
      override;

  int32_t Decode(
      const EncodedImage& inputImage, bool missingFrames,
      const RTPFragmentationHeader* fragmentation,
      const CodecSpecificInfo* codecSpecificInfo = NULL,
      int64_t renderTimeMs = -1) override;

  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
      override;

  int32_t Release() override;

  int32_t Reset() override;
  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  int32_t InitDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and
  // return true on success.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_ms);
  int32_t ProcessHWErrorOnCodecThread();

  // Type of video codec.
  VideoCodecType codecType_;

  bool key_frame_required_;
  bool inited_;
  bool sw_fallback_required_;
  bool use_surface_;
  VideoCodec codec_;
  webrtc::I420BufferPool decoded_frame_pool_;
  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
  DecodedImageCallback* callback_;
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_decoding_time_ms_;  // Overall decoding time in the current
                                  // statistics interval.
  uint32_t max_pending_frames_;  // Maximum number of pending input frames.
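  // Per-frame metadata, pushed in decode order and popped FIFO in
  // DeliverPendingOutputs() to pair each decoded frame with its input.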
  std::vector<int32_t> timestamps_;
  std::vector<int64_t> ntp_times_ms_;
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent
                                             // to decoder input.

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  jmethodID j_init_decode_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_return_decoded_byte_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
  jfieldID j_textureID_field_;
  jfieldID j_transform_matrix_field_;
  jfieldID j_texture_timestamp_ns_field_;
  // MediaCodecVideoDecoder.DecodedByteBuffer fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;

  // Render EGL context - owned by factory, should not be allocated/destroyed
  // by VideoDecoder.
  jobject render_egl_context_;
};

MediaCodecVideoDecoder::MediaCodecVideoDecoder(
    JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
    codecType_(codecType),
    render_egl_context_(render_egl_context),
    key_frame_required_(true),
    inited_(false),
    sw_fallback_required_(false),
    codec_thread_(new Thread()),
    j_media_codec_video_decoder_class_(
        jni,
        FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
    j_media_codec_video_decoder_(
        jni,
        jni->NewObject(*j_media_codec_video_decoder_class_,
                       GetMethodID(jni,
                                   *j_media_codec_video_decoder_class_,
                                   "<init>",
                                   "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";

  j_init_decode_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "initDecode",
      "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
      "IILorg/webrtc/SurfaceTextureHelper;)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
  j_queue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
      "(I)Ljava/lang/Object;");
  j_return_decoded_byte_buffer_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_,
                  "returnDecodedByteBuffer", "(I)V");

  j_input_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "inputBuffers", "[Ljava/nio/ByteBuffer;");
  j_output_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "outputBuffers", "[Ljava/nio/ByteBuffer;");
  j_color_format_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
  j_width_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "width", "I");
  j_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "height", "I");
  j_stride_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "stride", "I");
  j_slice_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");

  jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
      "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
  j_textureID_field_ = GetFieldID(
      jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
  j_transform_matrix_field_ = GetFieldID(
      jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F");
  j_texture_timestamp_ns_field_ = GetFieldID(
      jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J");

  jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
      "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
  j_info_index_field_ = GetFieldID(
      jni, j_decoder_decoded_byte_buffer_class, "index", "I");
  j_info_offset_field_ = GetFieldID(
      jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
  j_info_size_field_ = GetFieldID(
      jni, j_decoder_decoded_byte_buffer_class, "size", "I");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");

  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
  use_surface_ = (render_egl_context_ != NULL);
  ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_);
  memset(&codec_, 0, sizeof(codec_));
  AllowBlockingCalls();
}

MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}

int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
                                           int32_t numberOfCores) {
  ALOGD("InitDecode.");
  if (inst == NULL) {
    ALOGE("NULL VideoCodec instance");
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  RTC_CHECK(inst->codecType == codecType_)
      << "Unsupported codec " << inst->codecType << " for " << codecType_;

  if (sw_fallback_required_) {
    ALOGE("InitDecode() - fallback to SW decoder");
    return WEBRTC_VIDEO_CODEC_OK;
  }
  // Save VideoCodec instance for later.
  if (&codec_ != inst) {
    codec_ = *inst;
  }
  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;

  // Call Java init.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
}

int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d.",
      (int)codecType_, codec_.width, codec_.height,
      codec_.maxFramerate);

  // Release previous codec first if it was allocated before.
  int ret_val = ReleaseOnCodecThread();
  if (ret_val < 0) {
    ALOGE("Release failure: %d - fallback to SW codec", ret_val);
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Always start with a complete key frame.
  key_frame_required_ = true;
  frames_received_ = 0;
  frames_decoded_ = 0;

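  // For surface output, create the helper that provides the SurfaceTexture
  // the MediaCodec renders decoded frames into; it is handed to the Java
  // initDecode() call below.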
  if (use_surface_) {
    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
        jni, render_egl_context_);
  }

  jobject j_video_codec_enum = JavaEnumFromIndex(
      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_init_decode_method_,
      j_video_codec_enum,
      codec_.width,
      codec_.height,
      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
                   : nullptr);
  if (CheckException(jni) || !success) {
    ALOGE("Codec initialization error - fallback to SW codec.");
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

  switch (codecType_) {
    case kVideoCodecVP8:
      max_pending_frames_ = kMaxPendingFramesVp8;
      break;
    case kVideoCodecH264:
      max_pending_frames_ = kMaxPendingFramesH264;
      break;
    default:
      max_pending_frames_ = 0;
  }
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;
  timestamps_.clear();
  ntp_times_ms_.clear();
  frame_rtc_times_ms_.clear();

  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    if (CheckException(jni)) {
      ALOGE("NewGlobalRef error - fallback to SW codec.");
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

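  // Kick off the periodic output poll: OnMessage() drains pending outputs and
  // reposts this delayed message for as long as the decoder stays initialized.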
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoDecoder::Release() {
  ALOGD("DecoderRelease request");
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
}

int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_);
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); i++) {
    jni->DeleteGlobalRef(input_buffers_[i]);
  }
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
  surface_texture_helper_ = nullptr;
  inited_ = false;
  rtc::MessageQueueManager::Clear(this);
  if (CheckException(jni)) {
    ALOGE("Decoder release exception");
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void MediaCodecVideoDecoder::CheckOnCodecThread() {
  RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}

int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
  CheckOnCodecThread();
  int ret_val = ReleaseOnCodecThread();
  if (ret_val < 0) {
    ALOGE("ProcessHWError: Release failure");
  }
  if (codecType_ == kVideoCodecH264) {
    // For now there is no SW H.264 which can be used as fallback codec.
    // So try to restart hw codec for now.
    ret_val = InitDecodeOnCodecThread();
    ALOGE("Reset H.264 codec done. Status: %d", ret_val);
    if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
      // H.264 codec was successfully reset - return regular error code.
      return WEBRTC_VIDEO_CODEC_ERROR;
    } else {
      // Failed to restart H.264 codec - return error code which should stop
      // the call.
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
  } else {
    sw_fallback_required_ = true;
    ALOGE("Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE");
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
}

int32_t MediaCodecVideoDecoder::Decode(
    const EncodedImage& inputImage,
    bool missingFrames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codecSpecificInfo,
    int64_t renderTimeMs) {
  if (sw_fallback_required_) {
    ALOGE("Decode() - fallback to SW codec");
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
  if (callback_ == NULL) {
    ALOGE("Decode() - callback_ is NULL");
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (inputImage._buffer == NULL && inputImage._length > 0) {
    ALOGE("Decode() - inputImage is incorrect");
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (!inited_) {
    ALOGE("Decode() - decoder is not initialized");
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // Check if encoded frame dimension has changed.
  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
      (inputImage._encodedWidth != codec_.width ||
      inputImage._encodedHeight != codec_.height)) {
    codec_.width = inputImage._encodedWidth;
    codec_.height = inputImage._encodedHeight;
    int32_t ret = InitDecode(&codec_, 1);
    if (ret < 0) {
      ALOGE("InitDecode failure: %d - fallback to SW codec", ret);
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
  }

  // Always start with a complete key frame.
  if (key_frame_required_) {
    if (inputImage._frameType != webrtc::kKeyFrame) {
      ALOGE("Decode() - key frame is required");
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (!inputImage._completeFrame) {
      ALOGE("Decode() - complete frame is required");
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    key_frame_required_ = false;
  }
  if (inputImage._length == 0) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
}

int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
    const EncodedImage& inputImage) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // Try to drain the decoder and wait until output is not too
  // much behind the input.
  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
    ALOGV("Received: %d. Decoded: %d. Wait for output...",
          frames_received_, frames_decoded_);
    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
      ALOGE("DeliverPendingOutputs error");
      return ProcessHWErrorOnCodecThread();
    }
    if (frames_received_ > frames_decoded_ + max_pending_frames_) {
      ALOGE("Output buffer dequeue timeout");
      return ProcessHWErrorOnCodecThread();
    }
  }

  // Get input buffer.
  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
                                                j_dequeue_input_buffer_method_);
  if (CheckException(jni) || j_input_buffer_index < 0) {
    ALOGE("dequeueInputBuffer error");
    return ProcessHWErrorOnCodecThread();
  }

  // Copy encoded data to Java ByteBuffer.
  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8_t* buffer =
      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
  RTC_CHECK(buffer) << "Indirect buffer??";
  int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
  if (CheckException(jni) || buffer_capacity < inputImage._length) {
    ALOGE("Input frame size %d is bigger than buffer size %d.",
          inputImage._length, buffer_capacity);
    return ProcessHWErrorOnCodecThread();
  }
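  // Synthesize the MediaCodec presentation timestamp from the frame count and
  // the configured max framerate; the original RTP and NTP timestamps are
  // queued separately below and reattached to the decoded frame on output.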
  jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
  ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d",
        frames_received_, inputImage._frameType, j_input_buffer_index,
        timestamp_us / 1000, inputImage._length);
  memcpy(buffer, inputImage._buffer, inputImage._length);

  // Save input image timestamps for later output.
  frames_received_++;
  current_bytes_ += inputImage._length;
  timestamps_.push_back(inputImage._timeStamp);
  ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());

  // Feed input to decoder.
  bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
                                        j_queue_input_buffer_method_,
                                        j_input_buffer_index,
                                        inputImage._length,
                                        timestamp_us);
  if (CheckException(jni) || !success) {
    ALOGE("queueInputBuffer error");
    return ProcessHWErrorOnCodecThread();
  }

  // Try to drain the decoder.
  if (!DeliverPendingOutputs(jni, 0)) {
    ALOGE("DeliverPendingOutputs error");
    return ProcessHWErrorOnCodecThread();
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoDecoder::DeliverPendingOutputs(
    JNIEnv* jni, int dequeue_timeout_ms) {
  if (frames_received_ <= frames_decoded_) {
    // No need to query for output buffers - decoder is drained.
    return true;
  }
  // Get decoder output.
  jobject j_decoder_output_buffer = jni->CallObjectMethod(
      *j_media_codec_video_decoder_,
      j_dequeue_output_buffer_method_,
      dequeue_timeout_ms);
  if (CheckException(jni)) {
    ALOGE("dequeueOutputBuffer() error");
    return false;
  }
  if (IsNull(jni, j_decoder_output_buffer)) {
    // No decoded frame ready.
    return true;
  }

  // Get decoded video frame properties.
  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
      j_color_format_field_);
  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
      j_slice_height_field_);

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
  long output_timestamps_ms = 0;
  if (use_surface_) {
    // Extract data from Java DecodedTextureBuffer.
    const int texture_id =
        GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
    const jfloatArray j_transform_matrix =
        reinterpret_cast<jfloatArray>(GetObjectField(
            jni, j_decoder_output_buffer, j_transform_matrix_field_));
    const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer,
                                              j_texture_timestamp_ns_field_);
    output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;
    // Create webrtc::VideoFrameBuffer with native texture handle.
    frame_buffer = surface_texture_helper_->CreateTextureFrame(
        width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
  } else {
    // Extract data from Java ByteBuffer and create output yuv420 frame -
    // for non surface decoding only.
    const int output_buffer_index =
        GetIntField(jni, j_decoder_output_buffer, j_info_index_field_);
    const int output_buffer_offset =
        GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
    const int output_buffer_size =
        GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
    const int64_t timestamp_us = GetLongField(
        jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
    output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;

    if (output_buffer_size < width * height * 3 / 2) {
      ALOGE("Insufficient output buffer size: %d", output_buffer_size);
      return false;
    }
    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
    jobject output_buffer =
        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
        output_buffer));
    if (CheckException(jni)) {
      return false;
    }
    payload += output_buffer_offset;

    // Create yuv420 frame.
    frame_buffer = decoded_frame_pool_.CreateBuffer(width, height);
    if (color_format == COLOR_FormatYUV420Planar) {
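      // Planar I420 output: copy the Y, U and V planes, converting from the
      // decoder's stride/slice height to the pooled buffer's strides.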
      RTC_CHECK_EQ(0, stride % 2);
      RTC_CHECK_EQ(0, slice_height % 2);
      const int uv_stride = stride / 2;
      const int u_slice_height = slice_height / 2;
      const uint8_t* y_ptr = payload;
      const uint8_t* u_ptr = y_ptr + stride * slice_height;
      const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height;
      libyuv::I420Copy(y_ptr, stride,
                       u_ptr, uv_stride,
                       v_ptr, uv_stride,
                       frame_buffer->MutableData(webrtc::kYPlane),
                       frame_buffer->stride(webrtc::kYPlane),
                       frame_buffer->MutableData(webrtc::kUPlane),
                       frame_buffer->stride(webrtc::kUPlane),
                       frame_buffer->MutableData(webrtc::kVPlane),
                       frame_buffer->stride(webrtc::kVPlane),
                       width, height);
    } else {
      // All other supported formats are NV12.
      const uint8_t* y_ptr = payload;
      const uint8_t* uv_ptr = y_ptr + stride * slice_height;
      libyuv::NV12ToI420(
          y_ptr, stride,
          uv_ptr, stride,
          frame_buffer->MutableData(webrtc::kYPlane),
          frame_buffer->stride(webrtc::kYPlane),
          frame_buffer->MutableData(webrtc::kUPlane),
          frame_buffer->stride(webrtc::kUPlane),
          frame_buffer->MutableData(webrtc::kVPlane),
          frame_buffer->stride(webrtc::kVPlane),
          width, height);
    }
    // Return output byte buffer back to codec.
    jni->CallVoidMethod(
        *j_media_codec_video_decoder_,
        j_return_decoded_byte_buffer_method_,
        output_buffer_index);
    if (CheckException(jni)) {
      ALOGE("returnDecodedByteBuffer error");
      return false;
    }
  }
  VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);

  // Get frame timestamps from a queue.
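  // Outputs are assumed to arrive in input order, so each metadata queue is
  // drained FIFO to pair the decoded frame with its original input.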
  if (timestamps_.size() > 0) {
    decoded_frame.set_timestamp(timestamps_.front());
    timestamps_.erase(timestamps_.begin());
  }
  if (ntp_times_ms_.size() > 0) {
    decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
    ntp_times_ms_.erase(ntp_times_ms_.begin());
  }
  int64_t frame_decoding_time_ms = 0;
  if (frame_rtc_times_ms_.size() > 0) {
    frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
  }
  ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
        " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
        color_format, output_timestamps_ms, frame_decoding_time_ms);

  // Calculate and print decoding statistics - every 3 seconds.
  frames_decoded_++;
  current_frames_++;
  current_decoding_time_ms_ += frame_decoding_time_ms;
  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
      current_frames_ > 0) {
    ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
          current_bytes_ * 8 / statistic_time_ms,
          (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
          current_decoding_time_ms_ / current_frames_, statistic_time_ms);
    start_time_ms_ = GetCurrentTimeMs();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_decoding_time_ms_ = 0;
  }

  // Callback - output decoded frame.
  const int32_t callback_status = callback_->Decoded(decoded_frame);
  if (callback_status > 0) {
    ALOGE("callback error");
  }

  return true;
}

int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoDecoder::Reset() {
  ALOGD("DecoderReset");
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return InitDecode(&codec_, 1);
}

void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (!inited_) {
    return;
  }
  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  RTC_CHECK(!msg->message_id) << "Unexpected message!";
  RTC_CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();

  if (!DeliverPendingOutputs(jni, 0)) {
    ALOGE("OnMessage: DeliverPendingOutputs error");
    ProcessHWErrorOnCodecThread();
    return;
  }
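  // Re-arm the periodic output poll.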
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}

MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
    render_egl_context_(NULL) {
  ALOGD("MediaCodecVideoDecoderFactory ctor");
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
  supported_codec_types_.clear();

  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
  if (CheckException(jni)) {
    is_vp8_hw_supported = false;
  }
  if (is_vp8_hw_supported) {
    ALOGD("VP8 HW Decoder supported.");
    supported_codec_types_.push_back(kVideoCodecVP8);
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
  if (CheckException(jni)) {
    is_h264_hw_supported = false;
  }
  if (is_h264_hw_supported) {
    ALOGD("H264 HW Decoder supported.");
    supported_codec_types_.push_back(kVideoCodecH264);
  }
}

MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
  ALOGD("MediaCodecVideoDecoderFactory dtor");
  if (render_egl_context_) {
    JNIEnv* jni = AttachCurrentThreadIfNeeded();
    jni->DeleteGlobalRef(render_egl_context_);
    render_egl_context_ = NULL;
  }
}

void MediaCodecVideoDecoderFactory::SetEGLContext(
    JNIEnv* jni, jobject render_egl_context) {
  ALOGD("MediaCodecVideoDecoderFactory::SetEGLContext");
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
    render_egl_context_ = NULL;
  }
  if (!IsNull(jni, render_egl_context)) {
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    if (CheckException(jni)) {
      ALOGE("error calling NewGlobalRef for EGL Context.");
      render_egl_context_ = NULL;
    } else {
      jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
      if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
        ALOGE("Wrong EGL Context.");
        jni->DeleteGlobalRef(render_egl_context_);
        render_egl_context_ = NULL;
      }
    }
  }
  if (render_egl_context_ == NULL) {
    ALOGW("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
}

webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
    VideoCodecType type) {
  if (supported_codec_types_.empty()) {
    ALOGE("No HW video decoder for type %d.", (int)type);
    return NULL;
  }
  for (VideoCodecType codec_type : supported_codec_types_) {
    if (codec_type == type) {
      ALOGD("Create HW video decoder for type %d.", (int)type);
      return new MediaCodecVideoDecoder(
          AttachCurrentThreadIfNeeded(), type, render_egl_context_);
    }
  }
  ALOGE("Can not find HW video decoder for type %d.", (int)type);
  return NULL;
}

void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  delete decoder;
}

}  // namespace webrtc_jni