/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
using rtc::scoped_ptr;

using webrtc::CodecSpecificInfo;
using webrtc::EncodedImage;
using webrtc::I420VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;

namespace webrtc_jni {

// H.264 start code length.
#define H264_SC_LENGTH 4
// Maximum allowed NALUs in one output frame.
#define MAX_NALUS_PERFRAME 32
// Maximum supported HW video encoder resolution.
#define MAX_VIDEO_WIDTH 1280
#define MAX_VIDEO_HEIGHT 1280
// Maximum supported HW video encoder fps.
#define MAX_VIDEO_FPS 30

// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
// HW-backed video encode. This C++ class is implemented as a very thin shim,
// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
// MediaCodecVideoEncoder is created, operated, and destroyed on a single
// thread, currently the libjingle Worker thread.
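// The Java side exposes a small control surface that this shim drives over
// JNI: initEncode(), dequeueInputBuffer(), encode(), dequeueOutputBuffer(),
// releaseOutputBuffer(), setRates() and release(), plus the OutputBufferInfo
// helper class whose index/buffer/isKeyFrame/presentationTimestampUs fields
// are read back below.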
class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
                               public rtc::MessageHandler {
 public:
  virtual ~MediaCodecVideoEncoder();
  explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType);

  // webrtc::VideoEncoder implementation. Everything trampolines to
  // |codec_thread_| for execution.
  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                     int32_t /* number_of_cores */,
                     size_t /* max_payload_size */) override;
  int32_t Encode(
      const webrtc::I420VideoFrame& input_image,
      const webrtc::CodecSpecificInfo* /* codec_specific_info */,
      const std::vector<webrtc::VideoFrameType>* frame_types) override;
  int32_t RegisterEncodeCompleteCallback(
      webrtc::EncodedImageCallback* callback) override;
  int32_t Release() override;
  int32_t SetChannelParameters(uint32_t /* packet_loss */,
                               int64_t /* rtt */) override;
  int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;

  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  // Calls Release() and InitEncode() in an attempt to restore the codec to an
  // operable state. Necessary after all manner of OMX-layer errors.
  void ResetCodec();

  // Implementation of webrtc::VideoEncoder methods above, all running on the
  // codec thread exclusively.
  //
  // If width == 0 then this is assumed to be a re-initialization and the
  // previously-current values are reused instead of the passed parameters
  // (makes it easier to reason about thread-safety).
  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
  int32_t EncodeOnCodecThread(
      const webrtc::I420VideoFrame& input_image,
      const std::vector<webrtc::VideoFrameType>* frame_types);
  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
      webrtc::EncodedImageCallback* callback);
  int32_t ReleaseOnCodecThread();
  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);

  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
  jlong GetOutputBufferInfoPresentationTimestampUs(
      JNIEnv* jni, jobject j_output_buffer_info);

  // Deliver any outputs pending in the MediaCodec to our |callback_| and
  // return true on success.
  bool DeliverPendingOutputs(JNIEnv* jni);

  // Search for the next H.264 start code (0x00000001) in |buffer| and return
  // its byte offset, or -1 if none is found.
  int32_t NextNaluPosition(uint8_t* buffer, size_t buffer_size);

  // Type of video codec.
  VideoCodecType codecType_;

  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
  // |codec_thread_| synchronously.
  webrtc::EncodedImageCallback* callback_;

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
  jmethodID j_init_encode_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_encode_method_;
  jmethodID j_release_method_;
  jmethodID j_set_rates_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  jfieldID j_color_format_field_;
  jfieldID j_info_index_field_;
  jfieldID j_info_buffer_field_;
  jfieldID j_info_is_key_frame_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // State that is valid only between InitEncode() and the next Release().
  // Touched only on codec_thread_ so no explicit synchronization necessary.
  int width_;   // Frame width in pixels.
  int height_;  // Frame height in pixels.
  bool inited_;
  uint16_t picture_id_;
  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  int last_set_fps_;  // Last-requested frame rate.
  int64_t current_timestamp_us_;  // Current frame timestamp in us.
  int frames_received_;  // Number of frames received by encoder.
  int frames_encoded_;  // Number of frames encoded by encoder.
  int frames_dropped_;  // Number of frames dropped by encoder.
  int frames_resolution_update_;  // Number of frames with new codec resolution.
  int frames_in_queue_;  // Number of frames in encoder queue.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_encoding_time_ms_;  // Total encoding time in the current
                                  // statistics interval.
  int64_t last_input_timestamp_ms_;  // Timestamp of last received YUV frame.
  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
  std::vector<int32_t> timestamps_;  // Video frame timestamp queue.
  std::vector<int64_t> render_times_ms_;  // Video frame render time queue.
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // encoder input.
  int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q.
  int64_t output_render_time_ms_;  // Last output frame render time from
                                   // render_times_ms_ queue.
  // Frame size in bytes fed to MediaCodec.
  int yuv_size_;
  // True only between a callback_->Encoded() call returning a positive value
  // and the next Encode() call, which is then dropped.
  bool drop_next_input_frame_;
  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
};

MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}

MediaCodecVideoEncoder::MediaCodecVideoEncoder(
    JNIEnv* jni, VideoCodecType codecType) :
  codecType_(codecType),
  callback_(NULL),
  inited_(false),
  picture_id_(0),
  codec_thread_(new Thread()),
  j_media_codec_video_encoder_class_(
      jni,
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
  j_media_codec_video_encoder_(
      jni,
      jni->NewObject(*j_media_codec_video_encoder_class_,
                     GetMethodID(jni,
                                 *j_media_codec_video_encoder_class_,
                                 "<init>",
                                 "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  // It would be nice to avoid spinning up a new thread per MediaCodec, and
  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
  // 2732 means that deadlocks abound. This class synchronously trampolines
  // to |codec_thread_|, so if anything else can be coming to _us_ from
  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
  // in the bug, we have a problem. For now work around that with a dedicated
  // thread.
  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";

  jclass j_output_buffer_info_class =
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
  j_init_encode_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "initEncode",
      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)"
      "[Ljava/nio/ByteBuffer;");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
  j_encode_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
  j_set_rates_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "dequeueOutputBuffer",
      "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");

  j_color_format_field_ =
      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
  j_info_index_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
  j_info_buffer_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
  j_info_is_key_frame_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
  AllowBlockingCalls();
}

int32_t MediaCodecVideoEncoder::InitEncode(
    const webrtc::VideoCodec* codec_settings,
    int32_t /* number_of_cores */,
    size_t /* max_payload_size */) {
  if (codec_settings == NULL) {
    ALOGE("NULL VideoCodec instance");
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  CHECK(codec_settings->codecType == codecType_) << "Unsupported codec " <<
      codec_settings->codecType << " for " << codecType_;

  ALOGD("InitEncode request");
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
           this,
           codec_settings->width,
           codec_settings->height,
           codec_settings->startBitrate,
           codec_settings->maxFramerate));
}

int32_t MediaCodecVideoEncoder::Encode(
    const webrtc::I420VideoFrame& frame,
    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
           this,
           callback));
}

int32_t MediaCodecVideoEncoder::Release() {
  ALOGD("EncoderRelease request");
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
}

int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
                                                     int64_t /* rtt */) {
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
                                         uint32_t frame_rate) {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
           this,
           new_bit_rate,
           frame_rate));
}

void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();
  if (!inited_) {
    return;
  }

  // It would be nice to recover from a failure here if one happened, but it's
  // unclear how to signal such a failure to the app, so instead we stay silent
  // about it and let the next app-called API method reveal the borkedness.
  DeliverPendingOutputs(jni);
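  // Re-post ourselves so pending MediaCodec outputs keep being polled even
  // when Encode() is not being called; Release() clears these messages via
  // rtc::MessageQueueManager::Clear().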
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}

void MediaCodecVideoEncoder::CheckOnCodecThread() {
  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}

void MediaCodecVideoEncoder::ResetCodec() {
  ALOGE("ResetCodec");
  if (Release() != WEBRTC_VIDEO_CODEC_OK ||
      codec_thread_->Invoke<int32_t>(Bind(
          &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
          width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
    // TODO(fischman): wouldn't it be nice if there was a way to gracefully
    // degrade to a SW encoder at this point? There isn't one AFAICT :(
    // https://code.google.com/p/webrtc/issues/detail?id=2920
  }
}

int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
    int width, int height, int kbps, int fps) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  ALOGD("InitEncodeOnCodecThread Type: %d. %d x %d. Bitrate: %d kbps. Fps: %d",
        (int)codecType_, width, height, kbps, fps);
  if (kbps == 0) {
    kbps = last_set_bitrate_kbps_;
  }
  if (fps == 0) {
    fps = last_set_fps_;
  }

  width_ = width;
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  last_set_fps_ = fps;
  yuv_size_ = width_ * height_ * 3 / 2;
  frames_received_ = 0;
  frames_encoded_ = 0;
  frames_dropped_ = 0;
  frames_resolution_update_ = 0;
  frames_in_queue_ = 0;
  current_timestamp_us_ = 0;
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_encoding_time_ms_ = 0;
  last_input_timestamp_ms_ = -1;
  last_output_timestamp_ms_ = -1;
  output_timestamp_ = 0;
  output_render_time_ms_ = 0;
  timestamps_.clear();
  render_times_ms_.clear();
  frame_rtc_times_ms_.clear();
  drop_next_input_frame_ = false;
  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
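  // |picture_id_| seeds the 15-bit VP8 pictureId reported in
  // CodecSpecificInfo; it is advanced (mod 0x8000) per delivered VP8 frame in
  // DeliverPendingOutputs().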
  // We enforce no extra stride/padding in the format creation step.
  jobject j_video_codec_enum = JavaEnumFromIndex(
      jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
  jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
      jni->CallObjectMethod(*j_media_codec_video_encoder_,
                            j_init_encode_method_,
                            j_video_codec_enum,
                            width_,
                            height_,
                            kbps,
                            fps));
  CHECK_EXCEPTION(jni);
  if (IsNull(jni, input_buffers)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  inited_ = true;
  switch (GetIntField(jni, *j_media_codec_video_encoder_,
                      j_color_format_field_)) {
    case COLOR_FormatYUV420Planar:
      encoder_fourcc_ = libyuv::FOURCC_YU12;
      break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
      encoder_fourcc_ = libyuv::FOURCC_NV12;
      break;
    default:
      LOG(LS_ERROR) << "Wrong color format.";
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  CHECK(input_buffers_.empty())
      << "Unexpected double InitEncode without Release";
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    int64 yuv_buffer_capacity =
        jni->GetDirectBufferCapacity(input_buffers_[i]);
    CHECK_EXCEPTION(jni);
    CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
  }
  CHECK_EXCEPTION(jni);

  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
    const webrtc::I420VideoFrame& frame,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    ResetCodec();
    // Continue as if everything's fine.
  }

  if (drop_next_input_frame_) {
    ALOGV("Encoder drop frame - failed callback.");
    drop_next_input_frame_ = false;
    return WEBRTC_VIDEO_CODEC_OK;
  }

  CHECK(frame_types->size() == 1) << "Unexpected stream count";
  if (frame.width() != width_ || frame.height() != height_) {
    frames_resolution_update_++;
    ALOGD("Unexpected frame resolution change from %d x %d to %d x %d",
          width_, height_, frame.width(), frame.height());
    if (frames_resolution_update_ > 3) {
      // Reset codec if we received more than 3 frames with new resolution.
      width_ = frame.width();
      height_ = frame.height();
      frames_resolution_update_ = 0;
      ResetCodec();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }
  frames_resolution_update_ = 0;

  bool key_frame = frame_types->front() != webrtc::kDeltaFrame;

  // Check if we accumulated too many frames in the encoder input buffers, or
  // the encoder latency exceeds 70 ms, and drop the frame if so.
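  // For example, last_input_timestamp_ms_ == 1100 with
  // last_output_timestamp_ms_ == 1000 gives a 100 ms encoder latency, which
  // exceeds the 70 ms budget and causes the incoming frame to be dropped.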
  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
    int encoder_latency_ms = last_input_timestamp_ms_ -
        last_output_timestamp_ms_;
    if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
      ALOGD("Drop frame - encoder is behind by %d ms. Q size: %d",
            encoder_latency_ms, frames_in_queue_);
      frames_dropped_++;
      return WEBRTC_VIDEO_CODEC_OK;
    }
  }

  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
                                                j_dequeue_input_buffer_method_);
  CHECK_EXCEPTION(jni);
  if (j_input_buffer_index == -1) {
    // Video codec falls behind - no input buffer available.
    ALOGV("Encoder drop frame - no input buffers available");
    frames_dropped_++;
    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
  }
  if (j_input_buffer_index == -2) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
        frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);

  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8* yuv_buffer =
      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK_EXCEPTION(jni);
  CHECK(yuv_buffer) << "Indirect buffer??";
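  // Convert the I420 input frame straight into the MediaCodec input buffer,
  // using the color format chosen in InitEncodeOnCodecThread() and a tightly
  // packed destination stride of |width_| (no padding).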
  CHECK(!libyuv::ConvertFromI420(
      frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
      frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
      yuv_buffer, width_,
      width_, height_,
      encoder_fourcc_))
      << "ConvertFromI420 failed";
  last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
  frames_in_queue_++;

  // Save input image timestamps for later output.
  timestamps_.push_back(frame.timestamp());
  render_times_ms_.push_back(frame.render_time_ms());
  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());

  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_method_,
                                              key_frame,
                                              j_input_buffer_index,
                                              yuv_size_,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
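  // The presentation timestamp handed to MediaCodec is synthesized from the
  // target frame rate; the frame's own timestamp and render time travel
  // through |timestamps_| and |render_times_ms_| instead.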
  current_timestamp_us_ += 1000000 / last_set_fps_;

  if (!encode_status || !DeliverPendingOutputs(jni)) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
    webrtc::EncodedImageCallback* callback) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("EncoderReleaseOnCodecThread: Frames received: %d. Encoded: %d. "
        "Dropped: %d.", frames_received_, frames_encoded_, frames_dropped_);
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); ++i)
    jni->DeleteGlobalRef(input_buffers_[i]);
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                      uint32_t frame_rate) {
  CheckOnCodecThread();
  if (last_set_bitrate_kbps_ == new_bit_rate &&
      last_set_fps_ == frame_rate) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (new_bit_rate > 0) {
    last_set_bitrate_kbps_ = new_bit_rate;
  }
  if (frame_rate > 0) {
    last_set_fps_ = frame_rate;
  }
  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                    j_set_rates_method_,
                                    last_set_bitrate_kbps_,
                                    last_set_fps_);
  CHECK_EXCEPTION(jni);
  if (!ret) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}

jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
}

bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
}

jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetLongField(
      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
}

bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
  while (true) {
    jobject j_output_buffer_info = jni->CallObjectMethod(
        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
    CHECK_EXCEPTION(jni);
    if (IsNull(jni, j_output_buffer_info)) {
      break;
    }

    int output_buffer_index =
        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
    if (output_buffer_index == -1) {
      ResetCodec();
      return false;
    }

    // Get key and config frame flags.
    jobject j_output_buffer =
        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);

    // Get frame timestamps from the queue - for non-config frames only.
    int64_t frame_encoding_time_ms = 0;
    last_output_timestamp_ms_ =
        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
        1000;
    if (frames_in_queue_ > 0) {
      output_timestamp_ = timestamps_.front();
      timestamps_.erase(timestamps_.begin());
      output_render_time_ms_ = render_times_ms_.front();
      render_times_ms_.erase(render_times_ms_.begin());
      frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
      frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
      frames_in_queue_--;
    }

    // Extract payload.
    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
    uint8* payload = reinterpret_cast<uint8_t*>(
        jni->GetDirectBufferAddress(j_output_buffer));
    CHECK_EXCEPTION(jni);

    ALOGV("Encoder frame out # %d. Key: %d. Size: %d. TS: %lld."
          " Latency: %lld. EncTime: %lld",
          frames_encoded_, key_frame, payload_size,
          last_output_timestamp_ms_,
          last_input_timestamp_ms_ - last_output_timestamp_ms_,
          frame_encoding_time_ms);

    // Calculate and print encoding statistics - every 3 seconds.
    frames_encoded_++;
    current_frames_++;
    current_bytes_ += payload_size;
    current_encoding_time_ms_ += frame_encoding_time_ms;
    int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
    if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
        current_frames_ > 0) {
      ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
            " encTime: %d for last %d ms",
            current_bytes_ * 8 / statistic_time_ms,
            last_set_bitrate_kbps_,
            (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
            current_encoding_time_ms_ / current_frames_, statistic_time_ms);
      start_time_ms_ = GetCurrentTimeMs();
      current_frames_ = 0;
      current_bytes_ = 0;
      current_encoding_time_ms_ = 0;
    }

    // Callback - return encoded frame.
    int32_t callback_status = 0;
    if (callback_) {
      scoped_ptr<webrtc::EncodedImage> image(
          new webrtc::EncodedImage(payload, payload_size, payload_size));
      image->_encodedWidth = width_;
      image->_encodedHeight = height_;
      image->_timeStamp = output_timestamp_;
      image->capture_time_ms_ = output_render_time_ms_;
      image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
      image->_completeFrame = true;

      webrtc::CodecSpecificInfo info;
      memset(&info, 0, sizeof(info));
      info.codecType = codecType_;
      if (codecType_ == kVideoCodecVP8) {
        info.codecSpecific.VP8.pictureId = picture_id_;
        info.codecSpecific.VP8.nonReference = false;
        info.codecSpecific.VP8.simulcastIdx = 0;
        info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
        info.codecSpecific.VP8.layerSync = false;
        info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
        info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
        picture_id_ = (picture_id_ + 1) & 0x7FFF;
      }

      // Generate a header describing a single fragment.
      webrtc::RTPFragmentationHeader header;
      memset(&header, 0, sizeof(header));
      if (codecType_ == kVideoCodecVP8) {
        header.VerifyAndAllocateFragmentationHeader(1);
        header.fragmentationOffset[0] = 0;
        header.fragmentationLength[0] = image->_length;
        header.fragmentationPlType[0] = 0;
        header.fragmentationTimeDiff[0] = 0;
      } else if (codecType_ == kVideoCodecH264) {
        // For H.264, search for start codes.
        int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
        int32_t scPositionsLength = 0;
        int32_t scPosition = 0;
        while (scPositionsLength < MAX_NALUS_PERFRAME) {
          int32_t naluPosition = NextNaluPosition(
              payload + scPosition, payload_size - scPosition);
          if (naluPosition < 0) {
            break;
          }
          scPosition += naluPosition;
          scPositions[scPositionsLength++] = scPosition;
          scPosition += H264_SC_LENGTH;
        }
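        // Example: a payload laid out as
        //   00 00 00 01 <SPS> 00 00 00 01 <PPS> 00 00 00 01 <slice>
        // leaves scPositions holding the offset of each start code, and the
        // loop below then emits one fragmentation entry per NALU with the
        // 4-byte start codes stripped.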
        if (scPositionsLength == 0) {
          ALOGE("Start code is not found!");
          ALOGE("Data 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x",
                image->_buffer[0], image->_buffer[1], image->_buffer[2],
                image->_buffer[3], image->_buffer[4], image->_buffer[5]);
          ResetCodec();
          return false;
        }
        scPositions[scPositionsLength] = payload_size;
        header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
        for (size_t i = 0; i < scPositionsLength; i++) {
          header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
          header.fragmentationLength[i] =
              scPositions[i + 1] - header.fragmentationOffset[i];
          header.fragmentationPlType[i] = 0;
          header.fragmentationTimeDiff[i] = 0;
        }
      }

      callback_status = callback_->Encoded(*image, &info, &header);
    }

    // Return the output buffer back to the encoder.
    bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                          j_release_output_buffer_method_,
                                          output_buffer_index);
    CHECK_EXCEPTION(jni);
    if (!success) {
      ResetCodec();
      return false;
    }

    if (callback_status > 0) {
      drop_next_input_frame_ = true;
      // Theoretically could handle callback_status < 0 here, but unclear what
      // that would mean for us.
    }
  }

  return true;
}

int32_t MediaCodecVideoEncoder::NextNaluPosition(
    uint8_t* buffer, size_t buffer_size) {
  if (buffer_size < H264_SC_LENGTH) {
    return -1;
  }
  uint8_t* head = buffer;
  // Set the end pointer to 4 bytes before the actual buffer end so we can
  // access head[1], head[2] and head[3] in the loop without a buffer overrun.
  uint8_t* end = buffer + buffer_size - H264_SC_LENGTH;

  while (head < end) {
    if (head[0]) {
      head++;
      continue;
    }
    if (head[1]) {  // got 00xx
      head += 2;
      continue;
    }
    if (head[2]) {  // got 0000xx
      head += 3;
      continue;
    }
    if (head[3] != 0x01) {  // got 000000xx
      head += 4;  // xx != 1, continue searching.
      continue;
    }
    return (int32_t)(head - buffer);
  }
  return -1;
}


MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
  supported_codecs_.clear();

  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp8_hw_supported) {
    ALOGD("VP8 HW Encoder supported.");
    supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_h264_hw_supported) {
    ALOGD("H.264 HW Encoder supported.");
    supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }
}

MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}

webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
    VideoCodecType type) {
  if (supported_codecs_.empty()) {
    return NULL;
  }
  for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
       it != supported_codecs_.end(); ++it) {
    if (it->type == type) {
      ALOGD("Create HW video encoder for type %d (%s).",
            (int)type, it->name.c_str());
      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
    }
  }
  return NULL;
}

const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
MediaCodecVideoEncoderFactory::codecs() const {
  return supported_codecs_;
}

void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
    webrtc::VideoEncoder* encoder) {
  ALOGD("Destroy video encoder.");
  delete encoder;
}

}  // namespace webrtc_jni