/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
using rtc::scoped_ptr;

using webrtc::CodecSpecificInfo;
using webrtc::EncodedImage;
using webrtc::I420VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::VideoCodec;
using webrtc::kVideoCodecVP8;

namespace webrtc_jni {

// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
// HW-backed video encode. This C++ class is implemented as a very thin shim,
// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
// MediaCodecVideoEncoder is created, operated, and destroyed on a single
// thread, currently the libjingle Worker thread.
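//
// A rough usage sketch of the public API declared below. In this file the
// actual wiring is done by MediaCodecVideoEncoderFactory further down; the
// caller-side names (codec_settings, callback, frame, frame_types) are
// illustrative only:
//   MediaCodecVideoEncoder* encoder =
//       new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded());
//   encoder->InitEncode(&codec_settings, 1 /* cores */, 1200 /* payload */);
//   encoder->RegisterEncodeCompleteCallback(callback);
//   encoder->Encode(frame, NULL, &frame_types);  // Once per captured frame.
//   encoder->Release();
//   delete encoder;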
class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
                               public rtc::MessageHandler {
 public:
  virtual ~MediaCodecVideoEncoder();
  explicit MediaCodecVideoEncoder(JNIEnv* jni);

  // webrtc::VideoEncoder implementation. Everything trampolines to
  // |codec_thread_| for execution.
  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                     int32_t /* number_of_cores */,
                     size_t /* max_payload_size */) override;
  int32_t Encode(
      const webrtc::I420VideoFrame& input_image,
      const webrtc::CodecSpecificInfo* /* codec_specific_info */,
      const std::vector<webrtc::VideoFrameType>* frame_types) override;
  int32_t RegisterEncodeCompleteCallback(
      webrtc::EncodedImageCallback* callback) override;
  int32_t Release() override;
  int32_t SetChannelParameters(uint32_t /* packet_loss */,
                               int64_t /* rtt */) override;
  int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;

  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  // Release() and InitEncode() in an attempt to restore the codec to an
  // operable state. Necessary after all manner of OMX-layer errors.
  void ResetCodec();

  // Implementation of webrtc::VideoEncoder methods above, all running on the
  // codec thread exclusively.
  //
  // If width==0 then this is assumed to be a re-initialization and the
  // previously-current values are reused instead of the passed parameters
  // (makes it easier to reason about thread-safety).
  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
  int32_t EncodeOnCodecThread(
      const webrtc::I420VideoFrame& input_image,
      const std::vector<webrtc::VideoFrameType>* frame_types);
  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
      webrtc::EncodedImageCallback* callback);
  int32_t ReleaseOnCodecThread();
  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);

  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
  jlong GetOutputBufferInfoPresentationTimestampUs(
      JNIEnv* jni,
      jobject j_output_buffer_info);

  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni);

  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
  // |codec_thread_| synchronously.
  webrtc::EncodedImageCallback* callback_;

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
  jmethodID j_init_encode_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_encode_method_;
  jmethodID j_release_method_;
  jmethodID j_set_rates_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  jfieldID j_color_format_field_;
  jfieldID j_info_index_field_;
  jfieldID j_info_buffer_field_;
  jfieldID j_info_is_key_frame_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // State that is valid only between InitEncode() and the next Release().
  // Touched only on codec_thread_ so no explicit synchronization necessary.
  int width_;   // Frame width in pixels.
  int height_;  // Frame height in pixels.
  bool inited_;
  uint16_t picture_id_;
  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  int last_set_fps_;  // Last-requested frame rate.
  int64_t current_timestamp_us_;  // Current frame timestamp in us.
  int frames_received_;  // Number of frames received by encoder.
  int frames_dropped_;  // Number of frames dropped by encoder.
  int frames_resolution_update_;  // Number of frames with new codec resolution.
  int frames_in_queue_;  // Number of frames in encoder queue.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_encoding_time_ms_;  // Total encoding time in the current
                                  // statistics interval.
  int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
  std::vector<int32_t> timestamps_;  // Video frames timestamp queue.
  std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // encoder input.
  // Frame size in bytes fed to MediaCodec.
  int yuv_size_;
  // Set to true when a callback_->Encoded() call returns a positive value;
  // the next Encode() call then drops its frame and clears the flag.
  bool drop_next_input_frame_;
  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
};

MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}

MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
    : callback_(NULL),
      inited_(false),
      picture_id_(0),
      codec_thread_(new Thread()),
      j_media_codec_video_encoder_class_(
          jni,
          FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
      j_media_codec_video_encoder_(
          jni,
          jni->NewObject(*j_media_codec_video_encoder_class_,
                         GetMethodID(jni,
                                     *j_media_codec_video_encoder_class_,
                                     "<init>",
                                     "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  // It would be nice to avoid spinning up a new thread per MediaCodec, and
  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
  // 2732 means that deadlocks abound. This class synchronously trampolines
  // to |codec_thread_|, so if anything else can be coming to _us_ from
  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
  // in the bug, we have a problem. For now work around that with a dedicated
  // thread.
  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";

  jclass j_output_buffer_info_class =
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
  j_init_encode_method_ = GetMethodID(jni,
                                      *j_media_codec_video_encoder_class_,
                                      "initEncode",
                                      "(IIII)[Ljava/nio/ByteBuffer;");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
  j_encode_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
  j_set_rates_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
  j_dequeue_output_buffer_method_ =
      GetMethodID(jni,
                  *j_media_codec_video_encoder_class_,
                  "dequeueOutputBuffer",
                  "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");

  j_color_format_field_ =
      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
  j_info_index_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
  j_info_buffer_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
  j_info_is_key_frame_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
  AllowBlockingCalls();
}

int32_t MediaCodecVideoEncoder::InitEncode(
    const webrtc::VideoCodec* codec_settings,
    int32_t /* number_of_cores */,
    size_t /* max_payload_size */) {
  // Factory should guard against other codecs being used with us.
  CHECK(codec_settings->codecType == kVideoCodecVP8) << "Unsupported codec";

  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
           this,
           codec_settings->width,
           codec_settings->height,
           codec_settings->startBitrate,
           codec_settings->maxFramerate));
}

int32_t MediaCodecVideoEncoder::Encode(
    const webrtc::I420VideoFrame& frame,
    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
           this,
           callback));
}

int32_t MediaCodecVideoEncoder::Release() {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
}

int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
                                                     int64_t /* rtt */) {
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
                                         uint32_t frame_rate) {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
           this,
           new_bit_rate,
           frame_rate));
}

void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();
  if (!inited_) {
    return;
  }

  // It would be nice to recover from a failure here if one happened, but it's
  // unclear how to signal such a failure to the app, so instead we stay silent
  // about it and let the next app-called API method reveal the borkedness.
  DeliverPendingOutputs(jni);
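  // Re-post the delayed self-message so pending encoder outputs keep getting
  // polled every kMediaCodecPollMs for as long as the codec stays inited.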
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}

void MediaCodecVideoEncoder::CheckOnCodecThread() {
  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}

void MediaCodecVideoEncoder::ResetCodec() {
  ALOGE("ResetCodec");
  if (Release() != WEBRTC_VIDEO_CODEC_OK ||
      codec_thread_->Invoke<int32_t>(Bind(
          &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
          width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
    // TODO(fischman): wouldn't it be nice if there was a way to gracefully
    // degrade to a SW encoder at this point? There isn't one AFAICT :(
    // https://code.google.com/p/webrtc/issues/detail?id=2920
  }
}

int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
    int width, int height, int kbps, int fps) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  ALOGD("InitEncodeOnCodecThread %d x %d. Bitrate: %d kbps. Fps: %d",
        width, height, kbps, fps);
  if (kbps == 0) {
    kbps = last_set_bitrate_kbps_;
  }
  if (fps == 0) {
    fps = last_set_fps_;
  }

  width_ = width;
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  last_set_fps_ = fps;
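  // An I420 frame is a full-resolution Y plane plus two quarter-size chroma
  // planes, i.e. width * height * 3 / 2 bytes.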
  yuv_size_ = width_ * height_ * 3 / 2;
  frames_received_ = 0;
  frames_dropped_ = 0;
  frames_resolution_update_ = 0;
  frames_in_queue_ = 0;
  current_timestamp_us_ = 0;
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_encoding_time_ms_ = 0;
  last_input_timestamp_ms_ = -1;
  last_output_timestamp_ms_ = -1;
  timestamps_.clear();
  render_times_ms_.clear();
  frame_rtc_times_ms_.clear();
  drop_next_input_frame_ = false;
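  // The VP8 picture ID is a 15-bit value: start it at a random point and keep
  // it masked to 0x7FFF (see the wrap-around increment in
  // DeliverPendingOutputs()).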
  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
  // We enforce no extra stride/padding in the format creation step.
  jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
      jni->CallObjectMethod(*j_media_codec_video_encoder_,
                            j_init_encode_method_,
                            width_,
                            height_,
                            kbps,
                            fps));
  CHECK_EXCEPTION(jni);
  if (IsNull(jni, input_buffers))
    return WEBRTC_VIDEO_CODEC_ERROR;

  inited_ = true;
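  // Map the Java-side MediaCodec color format to the libyuv FOURCC that
  // ConvertFromI420() uses in EncodeOnCodecThread().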
  switch (GetIntField(jni, *j_media_codec_video_encoder_,
                      j_color_format_field_)) {
    case COLOR_FormatYUV420Planar:
      encoder_fourcc_ = libyuv::FOURCC_YU12;
      break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
      encoder_fourcc_ = libyuv::FOURCC_NV12;
      break;
    default:
      LOG(LS_ERROR) << "Wrong color format.";
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  CHECK(input_buffers_.empty())
      << "Unexpected double InitEncode without Release";
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    int64 yuv_buffer_capacity =
        jni->GetDirectBufferCapacity(input_buffers_[i]);
    CHECK_EXCEPTION(jni);
    CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
  }
  CHECK_EXCEPTION(jni);

  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
    const webrtc::I420VideoFrame& frame,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    ResetCodec();
    // Continue as if everything's fine.
  }

  if (drop_next_input_frame_) {
    ALOGV("Encoder drop frame - failed callback.");
    drop_next_input_frame_ = false;
    return WEBRTC_VIDEO_CODEC_OK;
  }

  CHECK(frame_types->size() == 1) << "Unexpected stream count";
  if (frame.width() != width_ || frame.height() != height_) {
    frames_resolution_update_++;
    ALOGD("Unexpected frame resolution change from %d x %d to %d x %d",
          width_, height_, frame.width(), frame.height());
    if (frames_resolution_update_ > 3) {
      // Reset codec if we received more than 3 frames with new resolution.
      width_ = frame.width();
      height_ = frame.height();
      frames_resolution_update_ = 0;
      ResetCodec();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }
  frames_resolution_update_ = 0;

  bool key_frame = frame_types->front() != webrtc::kDeltaFrame;

  // Drop the frame if too many frames have accumulated in the encoder's input
  // buffers or if the encoder latency exceeds 70 ms.
  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
    int encoder_latency_ms = last_input_timestamp_ms_ -
        last_output_timestamp_ms_;
    if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
      ALOGD("Drop frame - encoder is behind by %d ms. Q size: %d",
            encoder_latency_ms, frames_in_queue_);
      frames_dropped_++;
      return WEBRTC_VIDEO_CODEC_OK;
    }
  }

  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
                                                j_dequeue_input_buffer_method_);
  CHECK_EXCEPTION(jni);
  if (j_input_buffer_index == -1) {
    // Video codec falls behind - no input buffer available.
    ALOGV("Encoder drop frame - no input buffers available");
    frames_dropped_++;
    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
  }
  if (j_input_buffer_index == -2) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
        frames_received_, j_input_buffer_index, current_timestamp_us_ / 1000);

  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8* yuv_buffer =
      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK_EXCEPTION(jni);
  CHECK(yuv_buffer) << "Indirect buffer??";
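  // Convert the I420 frame into the encoder's color format directly inside the
  // MediaCodec input buffer; the destination stride is width_ because no extra
  // padding was requested in initEncode().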
  CHECK(!libyuv::ConvertFromI420(
      frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
      frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
      yuv_buffer, width_,
      width_, height_,
      encoder_fourcc_))
      << "ConvertFromI420 failed";
  last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
  frames_in_queue_++;

  // Save input image timestamps for later output
  timestamps_.push_back(frame.timestamp());
  render_times_ms_.push_back(frame.render_time_ms());
  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());

  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_method_,
                                              key_frame,
                                              j_input_buffer_index,
                                              yuv_size_,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
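  // Advance the synthetic MediaCodec presentation timestamp by one nominal
  // frame duration; the RTP timestamp and render time of each frame are
  // tracked separately in the queues above.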
  current_timestamp_us_ += 1000000 / last_set_fps_;

  if (!encode_status || !DeliverPendingOutputs(jni)) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
    webrtc::EncodedImageCallback* callback) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("EncoderRelease: Frames received: %d. Frames dropped: %d.",
        frames_received_, frames_dropped_);
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); ++i)
    jni->DeleteGlobalRef(input_buffers_[i]);
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                      uint32_t frame_rate) {
  CheckOnCodecThread();
  if (last_set_bitrate_kbps_ == new_bit_rate &&
      last_set_fps_ == frame_rate) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (new_bit_rate > 0) {
    last_set_bitrate_kbps_ = new_bit_rate;
  }
  if (frame_rate > 0) {
    last_set_fps_ = frame_rate;
  }
  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                    j_set_rates_method_,
                                    last_set_bitrate_kbps_,
                                    last_set_fps_);
  CHECK_EXCEPTION(jni);
  if (!ret) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}

jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
}

bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
}

jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetLongField(
      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
}

bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
  while (true) {
    jobject j_output_buffer_info = jni->CallObjectMethod(
        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
    CHECK_EXCEPTION(jni);
    if (IsNull(jni, j_output_buffer_info)) {
      break;
    }

    int output_buffer_index =
        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
    if (output_buffer_index == -1) {
      ResetCodec();
      return false;
    }

    // Get frame timestamps from a queue.
    last_output_timestamp_ms_ =
        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
        1000;
    int32_t timestamp = timestamps_.front();
    timestamps_.erase(timestamps_.begin());
    int64_t render_time_ms = render_times_ms_.front();
    render_times_ms_.erase(render_times_ms_.begin());
    int64_t frame_encoding_time_ms = GetCurrentTimeMs() -
        frame_rtc_times_ms_.front();
    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
    frames_in_queue_--;

    // Extract payload and key frame flag.
    int32_t callback_status = 0;
    jobject j_output_buffer =
        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
    uint8* payload = reinterpret_cast<uint8_t*>(
        jni->GetDirectBufferAddress(j_output_buffer));
    CHECK_EXCEPTION(jni);

    ALOGV("Encoder got output buffer # %d. Size: %d. TS: %lld. Latency: %lld."
          " EncTime: %lld",
          output_buffer_index, payload_size, last_output_timestamp_ms_,
          last_input_timestamp_ms_ - last_output_timestamp_ms_,
          frame_encoding_time_ms);

    // Calculate and print encoding statistics - every 3 seconds.
    current_frames_++;
    current_bytes_ += payload_size;
    current_encoding_time_ms_ += frame_encoding_time_ms;
    int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
    if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
        current_frames_ > 0) {
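      // current_bytes_ * 8 / statistic_time_ms is kbit/s (bits divided by
      // milliseconds); the fps expression rounds to the nearest whole frame
      // rate.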
      ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
            " encTime: %d for last %d ms",
            current_bytes_ * 8 / statistic_time_ms,
            last_set_bitrate_kbps_,
            (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
            current_encoding_time_ms_ / current_frames_, statistic_time_ms);
      start_time_ms_ = GetCurrentTimeMs();
      current_frames_ = 0;
      current_bytes_ = 0;
      current_encoding_time_ms_ = 0;
    }

    // Callback - return encoded frame.
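    // The EncodedImage wraps the MediaCodec output buffer in place (no copy),
    // so the buffer is released back to the codec only after the callback
    // below has returned.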
    if (callback_) {
      scoped_ptr<webrtc::EncodedImage> image(
          new webrtc::EncodedImage(payload, payload_size, payload_size));
      image->_encodedWidth = width_;
      image->_encodedHeight = height_;
      image->_timeStamp = timestamp;
      image->capture_time_ms_ = render_time_ms;
      image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
      image->_completeFrame = true;

      webrtc::CodecSpecificInfo info;
      memset(&info, 0, sizeof(info));
      info.codecType = kVideoCodecVP8;
      info.codecSpecific.VP8.pictureId = picture_id_;
      info.codecSpecific.VP8.nonReference = false;
      info.codecSpecific.VP8.simulcastIdx = 0;
      info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
      info.codecSpecific.VP8.layerSync = false;
      info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
      info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
      picture_id_ = (picture_id_ + 1) & 0x7FFF;

      // Generate a header describing a single fragment.
      webrtc::RTPFragmentationHeader header;
      memset(&header, 0, sizeof(header));
      header.VerifyAndAllocateFragmentationHeader(1);
      header.fragmentationOffset[0] = 0;
      header.fragmentationLength[0] = image->_length;
      header.fragmentationPlType[0] = 0;
      header.fragmentationTimeDiff[0] = 0;

      callback_status = callback_->Encoded(*image, &info, &header);
    }

    // Return output buffer back to the encoder.
    bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                          j_release_output_buffer_method_,
                                          output_buffer_index);
    CHECK_EXCEPTION(jni);
    if (!success) {
      ResetCodec();
      return false;
    }

    if (callback_status > 0) {
      drop_next_input_frame_ = true;
      // Theoretically could handle callback_status<0 here, but unclear what
      // that would mean for us.
    }
  }

  return true;
}

MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
  bool is_platform_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isPlatformSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (!is_platform_supported)
    return;

  // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
  // encoder? Sure would be. Too bad it doesn't. So we hard-code some
  // reasonable defaults.
  supported_codecs_.push_back(
      VideoCodec(kVideoCodecVP8, "VP8", 1280, 1280, 30));
}

MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}

webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
    webrtc::VideoCodecType type) {
  if (type != kVideoCodecVP8 || supported_codecs_.empty())
    return NULL;
  return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded());
}

const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
MediaCodecVideoEncoderFactory::codecs() const {
  return supported_codecs_;
}

void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
    webrtc::VideoEncoder* encoder) {
  delete encoder;
}

}  // namespace webrtc_jni