/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include <vector>

#include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
using rtc::scoped_ptr;

using webrtc::CodecSpecificInfo;
using webrtc::DecodedImageCallback;
using webrtc::EncodedImage;
using webrtc::I420VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::TextureVideoFrame;
using webrtc::TickTime;
using webrtc::VideoCodec;
using webrtc::kVideoCodecVP8;

namespace webrtc_jni {

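// EGL context shared with the application's renderer. When it is non-NULL,
// decoders deliver output as GL textures via SurfaceTexture instead of
// copying it into I420 buffers (see |use_surface_|).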
jobject MediaCodecVideoDecoderFactory::render_egl_context_ = NULL;

class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(JNIEnv* jni);
  virtual ~MediaCodecVideoDecoder();

  static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);

  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
      override;

  int32_t Decode(
      const EncodedImage& inputImage, bool missingFrames,
      const RTPFragmentationHeader* fragmentation,
      const CodecSpecificInfo* codecSpecificInfo = NULL,
      int64_t renderTimeMs = -1) override;

  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
      override;

  int32_t Release() override;

  int32_t Reset() override;

  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  int32_t InitDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and
  // return true on success.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);

  bool key_frame_required_;
  bool inited_;
  bool use_surface_;
  int error_count_;
  VideoCodec codec_;
  I420VideoFrame decoded_image_;
  NativeHandleImpl native_handle_;
  DecodedImageCallback* callback_;
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_decoding_time_ms_;  // Aggregate decoding time in the current
                                  // statistics interval.
  uint32_t max_pending_frames_;  // Maximum number of pending input frames.
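  // Per-frame metadata queues: an entry is pushed for every frame handed to
  // the codec in DecodeOnCodecThread() and popped in FIFO order when the
  // matching output is delivered in DeliverPendingOutputs().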
  std::vector<int32_t> timestamps_;
  std::vector<int64_t> ntp_times_ms_;
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent
                                             // to decoder input.

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  jmethodID j_init_decode_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  jfieldID j_surface_texture_field_;
  jfieldID j_textureID_field_;
  // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
  jobject surface_texture_;
  jobject previous_surface_texture_;
};

MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni)
    : key_frame_required_(true),
      inited_(false),
      error_count_(0),
      callback_(NULL),  // Set later via RegisterDecodeCompleteCallback().
      surface_texture_(NULL),
      previous_surface_texture_(NULL),
      codec_thread_(new Thread()),
      j_media_codec_video_decoder_class_(
          jni,
          FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
      j_media_codec_video_decoder_(
          jni,
          jni->NewObject(*j_media_codec_video_decoder_class_,
                         GetMethodID(jni,
                                     *j_media_codec_video_decoder_class_,
                                     "<init>",
                                     "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";

  j_init_decode_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "initDecode",
      "(IIZZLandroid/opengl/EGLContext;)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
  j_queue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");

  j_input_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "inputBuffers", "[Ljava/nio/ByteBuffer;");
  j_output_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "outputBuffers", "[Ljava/nio/ByteBuffer;");
  j_color_format_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
  j_width_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "width", "I");
  j_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "height", "I");
  j_stride_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "stride", "I");
  j_slice_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
  j_textureID_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "textureID", "I");
  j_surface_texture_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
      "Landroid/graphics/SurfaceTexture;");

  jclass j_decoder_output_buffer_info_class = FindClass(jni,
      "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
  j_info_index_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "index", "I");
  j_info_offset_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "offset", "I");
  j_info_size_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "size", "I");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");

  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
  // Surface (texture) output is only possible when a render EGL context has
  // been supplied via SetAndroidObjects().
  use_surface_ = (MediaCodecVideoDecoderFactory::render_egl_context_ != NULL);
  memset(&codec_, 0, sizeof(codec_));
  AllowBlockingCalls();
}

MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
  // Delete global references.
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  if (previous_surface_texture_ != NULL)
    jni->DeleteGlobalRef(previous_surface_texture_);
  if (surface_texture_ != NULL)
    jni->DeleteGlobalRef(surface_texture_);
}

int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
                                           int32_t numberOfCores) {
  if (inst == NULL) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  int ret_val = Release();
  if (ret_val < 0) {
    return ret_val;
  }
  // Save VideoCodec instance for later.
  if (&codec_ != inst) {
    codec_ = *inst;
  }
  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;

  // Always start with a complete key frame.
  key_frame_required_ = true;
  frames_received_ = 0;
  frames_decoded_ = 0;

  // Call Java init.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
}

int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD("InitDecodeOnCodecThread: %d x %d. Fps: %d. Errors: %d",
        codec_.width, codec_.height, codec_.maxFramerate, error_count_);
  bool use_sw_codec = false;
  if (error_count_ > 1) {
    // If more than one critical error has occurred in the HW codec, fall back
    // to a SW codec.
    use_sw_codec = true;
  }

  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_init_decode_method_,
      codec_.width,
      codec_.height,
      use_sw_codec,
      use_surface_,
      MediaCodecVideoDecoderFactory::render_egl_context_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

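  // In surface mode a single decoded frame may remain in flight, since its
  // output buffer stays attached to the texture until it has been rendered.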
  max_pending_frames_ = 0;
  if (use_surface_) {
    max_pending_frames_ = 1;
  }
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;
  timestamps_.clear();
  ntp_times_ms_.clear();
  frame_rtc_times_ms_.clear();

  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    CHECK_EXCEPTION(jni);
  }

  if (use_surface_) {
    jobject surface_texture = GetObjectField(
        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
    if (previous_surface_texture_ != NULL) {
      jni->DeleteGlobalRef(previous_surface_texture_);
    }
    previous_surface_texture_ = surface_texture_;
    surface_texture_ = jni->NewGlobalRef(surface_texture);
  }
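  // Start the periodic polling loop (see OnMessage()) that drains decoder
  // output even when no new input arrives.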
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoDecoder::Release() {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
}

int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); i++) {
    jni->DeleteGlobalRef(input_buffers_[i]);
  }
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

void MediaCodecVideoDecoder::CheckOnCodecThread() {
  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}

int32_t MediaCodecVideoDecoder::Decode(
    const EncodedImage& inputImage,
    bool missingFrames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codecSpecificInfo,
    int64_t renderTimeMs) {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (callback_ == NULL) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (inputImage._buffer == NULL && inputImage._length > 0) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Check if encoded frame dimension has changed.
  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
      (inputImage._encodedWidth != codec_.width ||
       inputImage._encodedHeight != codec_.height)) {
    codec_.width = inputImage._encodedWidth;
    codec_.height = inputImage._encodedHeight;
    InitDecode(&codec_, 1);
  }

  // Always start with a complete key frame.
  if (key_frame_required_) {
    if (inputImage._frameType != webrtc::kKeyFrame) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (!inputImage._completeFrame) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    key_frame_required_ = false;
  }
  if (inputImage._length == 0) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
}

int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
    const EncodedImage& inputImage) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // Try to drain the decoder and wait until output is not too
  // much behind the input.
  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
    ALOGV("Wait for output...");
    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
      error_count_++;
      Reset();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (frames_received_ > frames_decoded_ + max_pending_frames_) {
      ALOGE("Output buffer dequeue timeout");
      error_count_++;
      Reset();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  // Get input buffer.
  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
                                                j_dequeue_input_buffer_method_);
  CHECK_EXCEPTION(jni);
  if (j_input_buffer_index < 0) {
    ALOGE("dequeueInputBuffer error");
    error_count_++;
    Reset();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Copy encoded data to Java ByteBuffer.
  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8* buffer =
      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK(buffer) << "Indirect buffer??";
  int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
  CHECK_EXCEPTION(jni);
  if (buffer_capacity < inputImage._length) {
    ALOGE("Input frame size %d is bigger than buffer size %d.",
          inputImage._length, static_cast<int>(buffer_capacity));
    error_count_++;
    Reset();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  ALOGV("Decoder frame in # %d. Buffer # %d. Size: %d",
        frames_received_, j_input_buffer_index, inputImage._length);
  memcpy(buffer, inputImage._buffer, inputImage._length);

  // Save input image timestamps for later output.
  frames_received_++;
  current_bytes_ += inputImage._length;
  timestamps_.push_back(inputImage._timeStamp);
  ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());

  // Feed input to decoder.
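  // The presentation timestamp handed to MediaCodec is synthesized from the
  // frame count and nominal frame rate; the real RTP/NTP timestamps travel in
  // the queues above and are re-attached to the decoded frame on output.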
  jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
  bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
                                        j_queue_input_buffer_method_,
                                        j_input_buffer_index,
                                        inputImage._length,
                                        timestamp_us);
  CHECK_EXCEPTION(jni);
  if (!success) {
    ALOGE("queueInputBuffer error");
    error_count_++;
    Reset();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Try to drain the decoder.
  if (!DeliverPendingOutputs(jni, 0)) {
    ALOGE("DeliverPendingOutputs error");
    error_count_++;
    Reset();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoDecoder::DeliverPendingOutputs(
    JNIEnv* jni, int dequeue_timeout_us) {
  if (frames_received_ <= frames_decoded_) {
    // No need to query for output buffers - decoder is drained.
    return true;
  }
  // Get decoder output.
  jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
      *j_media_codec_video_decoder_,
      j_dequeue_output_buffer_method_,
      dequeue_timeout_us);

  CHECK_EXCEPTION(jni);
  if (IsNull(jni, j_decoder_output_buffer_info)) {
    return true;
  }

  // Extract output buffer info from Java DecoderOutputBufferInfo.
  int output_buffer_index =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
  if (output_buffer_index < 0) {
    ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
    return false;
  }
  int output_buffer_offset =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
  int output_buffer_size =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
  CHECK_EXCEPTION(jni);

  // Get decoded video frame properties.
  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
                                 j_color_format_field_);
  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
                                 j_slice_height_field_);
  int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
                               j_textureID_field_);

  // Extract data from the Java ByteBuffer and create an output YUV420 frame -
  // for non-surface decoding only.
  if (!use_surface_) {
    if (output_buffer_size < width * height * 3 / 2) {
      ALOGE("Insufficient output buffer size: %d", output_buffer_size);
      return false;
    }
    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
    jobject output_buffer =
        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
        output_buffer));
    CHECK_EXCEPTION(jni);
    payload += output_buffer_offset;

    // Create yuv420 frame.
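    // For planar output the codec buffer is already I420: Y occupies
    // stride * slice_height bytes, followed by the quarter-size U and V
    // planes, so the buffer can be wrapped without conversion.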
    if (color_format == COLOR_FormatYUV420Planar) {
      decoded_image_.CreateFrame(
          stride * slice_height, payload,
          (stride * slice_height) / 4, payload + (stride * slice_height),
          (stride * slice_height) / 4,
          payload + (5 * stride * slice_height / 4),
          width, height,
          stride, stride / 2, stride / 2);
    } else {
      // All other supported formats are NV12 (interleaved UV), which is
      // converted to I420 here.
      decoded_image_.CreateEmptyFrame(width, height, width,
                                      width / 2, width / 2);
      libyuv::NV12ToI420(
          payload, stride,
          payload + stride * slice_height, stride,
          decoded_image_.buffer(webrtc::kYPlane),
          decoded_image_.stride(webrtc::kYPlane),
          decoded_image_.buffer(webrtc::kUPlane),
          decoded_image_.stride(webrtc::kUPlane),
          decoded_image_.buffer(webrtc::kVPlane),
          decoded_image_.stride(webrtc::kVPlane),
          width, height);
    }
  }

  // Get frame timestamps from a queue.
  int32_t timestamp = timestamps_.front();
  timestamps_.erase(timestamps_.begin());
  int64_t ntp_time_ms = ntp_times_ms_.front();
  ntp_times_ms_.erase(ntp_times_ms_.begin());
  int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
      frame_rtc_times_ms_.front();
  frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());

  ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
        " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
        color_format, output_buffer_size, frame_decoding_time_ms);

  // Return output buffer back to codec.
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_release_output_buffer_method_,
      output_buffer_index,
      use_surface_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    ALOGE("releaseOutputBuffer error");
    return false;
  }

  // Calculate and print decoding statistics - every 3 seconds.
  frames_decoded_++;
  current_frames_++;
  current_decoding_time_ms_ += frame_decoding_time_ms;
  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
      current_frames_ > 0) {
    ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
          current_bytes_ * 8 / statistic_time_ms,
          (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
          current_decoding_time_ms_ / current_frames_, statistic_time_ms);
    start_time_ms_ = GetCurrentTimeMs();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_decoding_time_ms_ = 0;
  }

  // Callback - output decoded frame.
  int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
  if (use_surface_) {
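    // Surface decoding: deliver a zero-copy texture frame that references the
    // shared SurfaceTexture and the codec's output texture.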
    native_handle_.SetTextureObject(surface_texture_, texture_id);
    TextureVideoFrame texture_image(
        &native_handle_, width, height, timestamp, 0);
    texture_image.set_ntp_time_ms(ntp_time_ms);
    callback_status = callback_->Decoded(texture_image);
  } else {
    decoded_image_.set_timestamp(timestamp);
    decoded_image_.set_ntp_time_ms(ntp_time_ms);
    callback_status = callback_->Decoded(decoded_image_);
  }
  if (callback_status > 0) {
    ALOGE("callback error");
  }

  return true;
}

int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoDecoder::Reset() {
  ALOGD("DecoderReset");
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return InitDecode(&codec_, 1);
}

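// Handles the periodic poll message posted from InitDecodeOnCodecThread() and
// from this method itself: drains any pending decoder output even when no new
// input is arriving.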
void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (!inited_) {
    return;
  }
  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();

  if (!DeliverPendingOutputs(jni, 0)) {
    error_count_++;
    Reset();
  }
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}

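// Installs the EGL context that enables surface (texture) decoding; passing a
// null context disables it.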
int MediaCodecVideoDecoderFactory::SetAndroidObjects(JNIEnv* jni,
    jobject render_egl_context) {
  ALOGD("SetAndroidObjects for surface decoding.");
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
  }
  if (IsNull(jni, render_egl_context)) {
    render_egl_context_ = NULL;
  } else {
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    CHECK_EXCEPTION(jni) << "error calling NewGlobalRef for EGL Context.";
    jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
    if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
      ALOGE("Wrong EGL Context.");
      jni->DeleteGlobalRef(render_egl_context_);
      render_egl_context_ = NULL;
    }
  }
  if (render_egl_context_ == NULL) {
    ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
  return 0;
}

MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
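  // Ask the Java side once whether this device has a HW decoder the wrapper
  // supports; CreateVideoDecoder() returns NULL when it does not.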
  is_platform_supported_ = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isPlatformSupported", "()Z"));
  CHECK_EXCEPTION(jni);
}

MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}

webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
    webrtc::VideoCodecType type) {
  if (type != kVideoCodecVP8 || !is_platform_supported_) {
    return NULL;
  }
  return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded());
}

void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  delete decoder;
}

}  // namespace webrtc_jni