/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
#include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
using rtc::scoped_ptr;

using webrtc::CodecSpecificInfo;
using webrtc::EncodedImage;
using webrtc::VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;

namespace webrtc_jni {

// H.264 start code length.
#define H264_SC_LENGTH 4
// Maximum allowed NALUs in one output frame.
#define MAX_NALUS_PERFRAME 32
// Maximum supported HW video encoder resolution.
#define MAX_VIDEO_WIDTH 1280
#define MAX_VIDEO_HEIGHT 1280
// Maximum supported HW video encoder fps.
#define MAX_VIDEO_FPS 30

// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
// HW-backed video encode. This C++ class is implemented as a very thin shim,
// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
// MediaCodecVideoEncoder is created, operated, and destroyed on a single
// thread, currently the libjingle Worker thread.
class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
                               public rtc::MessageHandler {
 public:
  virtual ~MediaCodecVideoEncoder();
  explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType);

  // webrtc::VideoEncoder implementation. Everything trampolines to
  // |codec_thread_| for execution.
  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                     int32_t /* number_of_cores */,
                     size_t /* max_payload_size */) override;
  int32_t Encode(
      const webrtc::VideoFrame& input_image,
      const webrtc::CodecSpecificInfo* /* codec_specific_info */,
      const std::vector<webrtc::VideoFrameType>* frame_types) override;
  int32_t RegisterEncodeCompleteCallback(
      webrtc::EncodedImageCallback* callback) override;
  int32_t Release() override;
  int32_t SetChannelParameters(uint32_t /* packet_loss */,
                               int64_t /* rtt */) override;
  int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;

  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

  void OnDroppedFrame() override;

  int GetTargetFramerate() override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  // Release() and InitEncode() in an attempt to restore the codec to an
  // operable state. Necessary after all manner of OMX-layer errors.
  void ResetCodec();

  // Implementation of webrtc::VideoEncoder methods above, all running on the
  // codec thread exclusively.
  //
  // If width==0 then this is assumed to be a re-initialization and the
  // previously-current values are reused instead of the passed parameters
  // (makes it easier to reason about thread-safety).
  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
  int32_t EncodeOnCodecThread(
      const webrtc::VideoFrame& input_image,
      const std::vector<webrtc::VideoFrameType>* frame_types);
  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
      webrtc::EncodedImageCallback* callback);
  int32_t ReleaseOnCodecThread();
  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);

  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
  jlong GetOutputBufferInfoPresentationTimestampUs(
      JNIEnv* jni, jobject j_output_buffer_info);

  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni);

  // Search for H.264 start codes.
  int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);

  // Type of video codec.
  VideoCodecType codecType_;

  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
  // |codec_thread_| synchronously.
  webrtc::EncodedImageCallback* callback_;

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
  jmethodID j_init_encode_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_encode_method_;
  jmethodID j_release_method_;
  jmethodID j_set_rates_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  jfieldID j_color_format_field_;
  jfieldID j_info_index_field_;
  jfieldID j_info_buffer_field_;
  jfieldID j_info_is_key_frame_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // State that is valid only between InitEncode() and the next Release().
  // Touched only on codec_thread_ so no explicit synchronization necessary.
  int width_;   // Frame width in pixels.
  int height_;  // Frame height in pixels.
  bool inited_;
  uint16_t picture_id_;
  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  int last_set_fps_;  // Last-requested frame rate.
  int64_t current_timestamp_us_;  // Current frame timestamps in us.
  int frames_received_;  // Number of frames received by encoder.
  int frames_encoded_;  // Number of frames encoded by encoder.
  int frames_dropped_;  // Number of frames dropped by encoder.
  int frames_in_queue_;  // Number of frames in encoder queue.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_encoding_time_ms_;  // Overall encoding time in the current second
  int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
  std::vector<int32_t> timestamps_;  // Video frames timestamp queue.
  std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // encoder input.
  int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q.
  int64_t output_render_time_ms_;  // Last output frame render time from
                                   // render_times_ms_ queue.
  // Frame size in bytes fed to MediaCodec.
  int yuv_size_;
  // Set after a callback_->Encoded() call returns a positive value; the next
  // Encode() call is then ignored (dropped) and the flag cleared.
  bool drop_next_input_frame_;
  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
  scoped_ptr<webrtc::QualityScaler> quality_scaler_;
  // Dynamic resolution change, off by default.
  bool scale_;
  int updated_framerate_;
};

MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}

MediaCodecVideoEncoder::MediaCodecVideoEncoder(
    JNIEnv* jni, VideoCodecType codecType) :
  codecType_(codecType),
  callback_(NULL),
  inited_(false),
  picture_id_(0),
  codec_thread_(new Thread()),
  quality_scaler_(new webrtc::QualityScaler()),
  j_media_codec_video_encoder_class_(
      jni,
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
  j_media_codec_video_encoder_(
      jni,
      jni->NewObject(*j_media_codec_video_encoder_class_,
                     GetMethodID(jni,
                                 *j_media_codec_video_encoder_class_,
                                 "<init>",
                                 "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  // It would be nice to avoid spinning up a new thread per MediaCodec, and
  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
  // 2732 means that deadlocks abound. This class synchronously trampolines
  // to |codec_thread_|, so if anything else can be coming to _us_ from
  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
  // in the bug, we have a problem. For now work around that with a dedicated
  // thread.
  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";

  jclass j_output_buffer_info_class =
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
  j_init_encode_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "initEncode",
      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)"
      "[Ljava/nio/ByteBuffer;");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
  j_encode_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
  j_set_rates_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "dequeueOutputBuffer",
      "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");

  j_color_format_field_ =
      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
  j_info_index_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
  j_info_buffer_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
  j_info_is_key_frame_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
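  // The public methods above trampoline to |codec_thread_| with blocking
  // Invoke() calls, so the constructing thread presumably has to opt in to
  // blocking calls here.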
  AllowBlockingCalls();
}

int32_t MediaCodecVideoEncoder::InitEncode(
    const webrtc::VideoCodec* codec_settings,
    int32_t /* number_of_cores */,
    size_t /* max_payload_size */) {
  const int kMinWidth = 320;
  const int kMinHeight = 180;
  // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
  // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is
  // always = 127. Note that in SW, QP is that of the user-level range [0, 63].
  const int kMaxQP = 127;
  const int kLowQpThresholdDenominator = 3;
  if (codec_settings == NULL) {
    ALOGE("NULL VideoCodec instance");
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  CHECK(codec_settings->codecType == codecType_) << "Unsupported codec " <<
      codec_settings->codecType << " for " << codecType_;

  ALOGD("InitEncode request");
  scale_ = false;
  if (scale_ && codecType_ == kVideoCodecVP8) {
    quality_scaler_->Init(kMaxQP / kLowQpThresholdDenominator, true);
    quality_scaler_->SetMinResolution(kMinWidth, kMinHeight);
    quality_scaler_->ReportFramerate(codec_settings->maxFramerate);
  }
  updated_framerate_ = codec_settings->maxFramerate;
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
           this,
           codec_settings->width,
           codec_settings->height,
           codec_settings->startBitrate,
           codec_settings->maxFramerate));
}

int32_t MediaCodecVideoEncoder::Encode(
    const webrtc::VideoFrame& frame,
    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
           this,
           callback));
}

int32_t MediaCodecVideoEncoder::Release() {
  ALOGD("EncoderRelease request");
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
}

int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
                                                     int64_t /* rtt */) {
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
                                         uint32_t frame_rate) {
  if (scale_ && codecType_ == kVideoCodecVP8) {
    quality_scaler_->ReportFramerate(frame_rate);
  } else {
    updated_framerate_ = frame_rate;
  }
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
           this,
           new_bit_rate,
           frame_rate));
}

void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();
  if (!inited_) {
    return;
  }

  // It would be nice to recover from a failure here if one happened, but it's
  // unclear how to signal such a failure to the app, so instead we stay silent
  // about it and let the next app-called API method reveal the borkedness.
  DeliverPendingOutputs(jni);
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}

void MediaCodecVideoEncoder::CheckOnCodecThread() {
  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}

void MediaCodecVideoEncoder::ResetCodec() {
  ALOGE("ResetCodec");
  if (Release() != WEBRTC_VIDEO_CODEC_OK ||
      codec_thread_->Invoke<int32_t>(Bind(
          &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
          width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
    // TODO(fischman): wouldn't it be nice if there was a way to gracefully
    // degrade to a SW encoder at this point? There isn't one AFAICT :(
    // https://code.google.com/p/webrtc/issues/detail?id=2920
  }
}

int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
    int width, int height, int kbps, int fps) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  ALOGD("InitEncodeOnCodecThread Type: %d. %d x %d. Bitrate: %d kbps. Fps: %d",
        (int)codecType_, width, height, kbps, fps);
  if (kbps == 0) {
    kbps = last_set_bitrate_kbps_;
  }
  if (fps == 0) {
    fps = last_set_fps_;
  }

  width_ = width;
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  last_set_fps_ = fps;
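  // I420 layout: a full-resolution Y plane plus quarter-resolution U and V
  // planes, i.e. 1.5 bytes per pixel.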
  yuv_size_ = width_ * height_ * 3 / 2;
  frames_received_ = 0;
  frames_encoded_ = 0;
  frames_dropped_ = 0;
  frames_in_queue_ = 0;
  current_timestamp_us_ = 0;
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_encoding_time_ms_ = 0;
  last_input_timestamp_ms_ = -1;
  last_output_timestamp_ms_ = -1;
  output_timestamp_ = 0;
  output_render_time_ms_ = 0;
  timestamps_.clear();
  render_times_ms_.clear();
  frame_rtc_times_ms_.clear();
  drop_next_input_frame_ = false;
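  // The VP8 picture ID is at most 15 bits, so start from a random 15-bit
  // value; DeliverPendingOutputs() increments it with the same 0x7FFF wrap.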
  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
  // We enforce no extra stride/padding in the format creation step.
  jobject j_video_codec_enum = JavaEnumFromIndex(
      jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
  jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
      jni->CallObjectMethod(*j_media_codec_video_encoder_,
                            j_init_encode_method_,
                            j_video_codec_enum,
                            width_,
                            height_,
                            kbps,
                            fps));
  CHECK_EXCEPTION(jni);
  if (IsNull(jni, input_buffers)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  inited_ = true;
  switch (GetIntField(jni, *j_media_codec_video_encoder_,
                      j_color_format_field_)) {
    case COLOR_FormatYUV420Planar:
      encoder_fourcc_ = libyuv::FOURCC_YU12;
      break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
      encoder_fourcc_ = libyuv::FOURCC_NV12;
      break;
    default:
      LOG(LS_ERROR) << "Wrong color format.";
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  CHECK(input_buffers_.empty())
      << "Unexpected double InitEncode without Release";
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    int64 yuv_buffer_capacity =
        jni->GetDirectBufferCapacity(input_buffers_[i]);
    CHECK_EXCEPTION(jni);
    CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
  }
  CHECK_EXCEPTION(jni);

  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
    const webrtc::VideoFrame& frame,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    ResetCodec();
    // Continue as if everything's fine.
  }

  if (drop_next_input_frame_) {
    ALOGV("Encoder drop frame - failed callback.");
    drop_next_input_frame_ = false;
    return WEBRTC_VIDEO_CODEC_OK;
  }

  CHECK(frame_types->size() == 1) << "Unexpected stream count";
  // Check framerate before spatial resolution change.
  if (scale_ && codecType_ == kVideoCodecVP8) {
    quality_scaler_->OnEncodeFrame(frame);
    updated_framerate_ = quality_scaler_->GetTargetFramerate();
  }
  const VideoFrame& input_frame = (scale_ && codecType_ == kVideoCodecVP8) ?
      quality_scaler_->GetScaledFrame(frame) : frame;

  if (input_frame.width() != width_ || input_frame.height() != height_) {
    ALOGD("Frame resolution change from %d x %d to %d x %d",
          width_, height_, input_frame.width(), input_frame.height());
    width_ = input_frame.width();
    height_ = input_frame.height();
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_OK;
  }

  // Check if we accumulated too many frames in encoder input buffers
  // or the encoder latency exceeds 70 ms and drop frame if so.
  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
    int encoder_latency_ms = last_input_timestamp_ms_ -
        last_output_timestamp_ms_;
    if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
      ALOGD("Drop frame - encoder is behind by %d ms. Q size: %d",
            encoder_latency_ms, frames_in_queue_);
      frames_dropped_++;
      // Report dropped frame to quality_scaler_.
      OnDroppedFrame();
      return WEBRTC_VIDEO_CODEC_OK;
    }
  }

  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
                                                j_dequeue_input_buffer_method_);
  CHECK_EXCEPTION(jni);
  if (j_input_buffer_index == -1) {
    // Video codec falls behind - no input buffer available.
    ALOGV("Encoder drop frame - no input buffers available");
    frames_dropped_++;
    // Report dropped frame to quality_scaler_.
    OnDroppedFrame();
    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
  }
  if (j_input_buffer_index == -2) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
        frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);

  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8* yuv_buffer =
      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK_EXCEPTION(jni);
  CHECK(yuv_buffer) << "Indirect buffer??";
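  // Convert the I420 input into the codec's advertised color layout
  // (FOURCC_YU12 or FOURCC_NV12) directly into the MediaCodec input buffer,
  // using width_ as the destination stride (no padding).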
  CHECK(!libyuv::ConvertFromI420(
      input_frame.buffer(webrtc::kYPlane),
      input_frame.stride(webrtc::kYPlane),
      input_frame.buffer(webrtc::kUPlane),
      input_frame.stride(webrtc::kUPlane),
      input_frame.buffer(webrtc::kVPlane),
      input_frame.stride(webrtc::kVPlane),
      yuv_buffer, width_,
      width_, height_,
      encoder_fourcc_))
      << "ConvertFromI420 failed";
  last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
  frames_in_queue_++;

  // Save input image timestamps for later output
  timestamps_.push_back(input_frame.timestamp());
  render_times_ms_.push_back(input_frame.render_time_ms());
  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());

  bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_method_,
                                              key_frame,
                                              j_input_buffer_index,
                                              yuv_size_,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
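  // Advance the synthesized presentation timestamp by one frame interval at
  // the last requested fps (the pre-increment value was passed to encode()
  // above).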
  current_timestamp_us_ += 1000000 / last_set_fps_;

  if (!encode_status || !DeliverPendingOutputs(jni)) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
    webrtc::EncodedImageCallback* callback) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("EncoderReleaseOnCodecThread: Frames received: %d. Encoded: %d. "
        "Dropped: %d.", frames_received_, frames_encoded_, frames_dropped_);
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); ++i)
    jni->DeleteGlobalRef(input_buffers_[i]);
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                      uint32_t frame_rate) {
  CheckOnCodecThread();
  if (last_set_bitrate_kbps_ == new_bit_rate &&
      last_set_fps_ == frame_rate) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (new_bit_rate > 0) {
    last_set_bitrate_kbps_ = new_bit_rate;
  }
  if (frame_rate > 0) {
    last_set_fps_ = frame_rate;
  }
  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                    j_set_rates_method_,
                                    last_set_bitrate_kbps_,
                                    last_set_fps_);
  CHECK_EXCEPTION(jni);
  if (!ret) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}

jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
}

bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
}

jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetLongField(
      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
}

bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
  while (true) {
    jobject j_output_buffer_info = jni->CallObjectMethod(
        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
    CHECK_EXCEPTION(jni);
    if (IsNull(jni, j_output_buffer_info)) {
      break;
    }

    int output_buffer_index =
        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
    if (output_buffer_index == -1) {
      ResetCodec();
      return false;
    }

    // Get key and config frame flags.
    jobject j_output_buffer =
        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);

    // Get frame timestamps from a queue - for non config frames only.
    int64_t frame_encoding_time_ms = 0;
    last_output_timestamp_ms_ =
        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
        1000;
    if (frames_in_queue_ > 0) {
      output_timestamp_ = timestamps_.front();
      timestamps_.erase(timestamps_.begin());
      output_render_time_ms_ = render_times_ms_.front();
      render_times_ms_.erase(render_times_ms_.begin());
      frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
      frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
      frames_in_queue_--;
    }

    // Extract payload.
    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
    uint8* payload = reinterpret_cast<uint8_t*>(
        jni->GetDirectBufferAddress(j_output_buffer));
    CHECK_EXCEPTION(jni);

    ALOGV("Encoder frame out # %d. Key: %d. Size: %d. TS: %lld."
          " Latency: %lld. EncTime: %lld",
          frames_encoded_, key_frame, payload_size,
          last_output_timestamp_ms_,
          last_input_timestamp_ms_ - last_output_timestamp_ms_,
          frame_encoding_time_ms);

    if (payload_size && scale_ && codecType_ == kVideoCodecVP8)
      quality_scaler_->ReportQP(webrtc::vp8::GetQP(payload));

    // Calculate and print encoding statistics - every 3 seconds.
    frames_encoded_++;
    current_frames_++;
    current_bytes_ += payload_size;
    current_encoding_time_ms_ += frame_encoding_time_ms;
    int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
    if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
        current_frames_ > 0) {
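      // current_bytes_ * 8 / statistic_time_ms is bits per millisecond, i.e.
      // kbps; the fps term adds statistic_time_ms / 2 before dividing so the
      // integer division rounds to the nearest value.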
736 ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
737 " encTime: %d for last %d ms",
738 current_bytes_ * 8 / statistic_time_ms,
739 last_set_bitrate_kbps_,
740 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
741 current_encoding_time_ms_ / current_frames_, statistic_time_ms);
742 start_time_ms_ = GetCurrentTimeMs();
743 current_frames_ = 0;
744 current_bytes_ = 0;
745 current_encoding_time_ms_ = 0;
746 }
747
748 // Callback - return encoded frame.
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000749 int32_t callback_status = 0;
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000750 if (callback_) {
751 scoped_ptr<webrtc::EncodedImage> image(
752 new webrtc::EncodedImage(payload, payload_size, payload_size));
753 image->_encodedWidth = width_;
754 image->_encodedHeight = height_;
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000755 image->_timeStamp = output_timestamp_;
756 image->capture_time_ms_ = output_render_time_ms_;
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000757 image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
758 image->_completeFrame = true;
759
760 webrtc::CodecSpecificInfo info;
761 memset(&info, 0, sizeof(info));
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000762 info.codecType = codecType_;
763 if (codecType_ == kVideoCodecVP8) {
764 info.codecSpecific.VP8.pictureId = picture_id_;
765 info.codecSpecific.VP8.nonReference = false;
766 info.codecSpecific.VP8.simulcastIdx = 0;
767 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
768 info.codecSpecific.VP8.layerSync = false;
769 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
770 info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
771 picture_id_ = (picture_id_ + 1) & 0x7FFF;
772 }
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000773
774 // Generate a header describing a single fragment.
775 webrtc::RTPFragmentationHeader header;
776 memset(&header, 0, sizeof(header));
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000777 if (codecType_ == kVideoCodecVP8) {
778 header.VerifyAndAllocateFragmentationHeader(1);
779 header.fragmentationOffset[0] = 0;
780 header.fragmentationLength[0] = image->_length;
781 header.fragmentationPlType[0] = 0;
782 header.fragmentationTimeDiff[0] = 0;
783 } else if (codecType_ == kVideoCodecH264) {
784 // For H.264 search for start codes.
785 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
786 int32_t scPositionsLength = 0;
787 int32_t scPosition = 0;
788 while (scPositionsLength < MAX_NALUS_PERFRAME) {
789 int32_t naluPosition = NextNaluPosition(
790 payload + scPosition, payload_size - scPosition);
791 if (naluPosition < 0) {
792 break;
793 }
794 scPosition += naluPosition;
795 scPositions[scPositionsLength++] = scPosition;
796 scPosition += H264_SC_LENGTH;
797 }
798 if (scPositionsLength == 0) {
799 ALOGE("Start code is not found!");
800 ALOGE("Data 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x",
801 image->_buffer[0], image->_buffer[1], image->_buffer[2],
802 image->_buffer[3], image->_buffer[4], image->_buffer[5]);
803 ResetCodec();
804 return false;
805 }
806 scPositions[scPositionsLength] = payload_size;
807 header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
808 for (size_t i = 0; i < scPositionsLength; i++) {
809 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
810 header.fragmentationLength[i] =
811 scPositions[i + 1] - header.fragmentationOffset[i];
812 header.fragmentationPlType[i] = 0;
813 header.fragmentationTimeDiff[i] = 0;
814 }
815 }
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000816
817 callback_status = callback_->Encoded(*image, &info, &header);
818 }
819
820 // Return output buffer back to the encoder.
821 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
822 j_release_output_buffer_method_,
823 output_buffer_index);
824 CHECK_EXCEPTION(jni);
825 if (!success) {
826 ResetCodec();
827 return false;
828 }
829
830 if (callback_status > 0) {
831 drop_next_input_frame_ = true;
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000832 // Theoretically could handle callback_status<0 here, but unclear what
833 // that would mean for us.
glaznev@webrtc.org18c92472015-02-18 18:42:55 +0000834 }
835 }
836
837 return true;
838}
839
glaznev@webrtc.orgb28474c2015-02-23 17:44:27 +0000840int32_t MediaCodecVideoEncoder::NextNaluPosition(
841 uint8_t *buffer, size_t buffer_size) {
842 if (buffer_size < H264_SC_LENGTH) {
843 return -1;
844 }
845 uint8_t *head = buffer;
846 // Set end buffer pointer to 4 bytes before actual buffer end so we can
847 // access head[1], head[2] and head[3] in a loop without buffer overrun.
848 uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
849
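  // Scan for the four-byte Annex B start code 00 00 00 01, advancing past any
  // byte that rules out a match at the current position.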
  while (head < end) {
    if (head[0]) {
      head++;
      continue;
    }
    if (head[1]) {  // got 00xx
      head += 2;
      continue;
    }
    if (head[2]) {  // got 0000xx
      head += 3;
      continue;
    }
    if (head[3] != 0x01) {  // got 000000xx
      head++;  // xx != 1, continue searching.
      continue;
    }
    return (int32_t)(head - buffer);
  }
  return -1;
}

void MediaCodecVideoEncoder::OnDroppedFrame() {
  if (scale_ && codecType_ == kVideoCodecVP8)
    quality_scaler_->ReportDroppedFrame();
}

int MediaCodecVideoEncoder::GetTargetFramerate() {
  return updated_framerate_;
}

MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
  supported_codecs_.clear();

  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp8_hw_supported) {
    ALOGD("VP8 HW Encoder supported.");
    supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_h264_hw_supported) {
    ALOGD("H.264 HW Encoder supported.");
    supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }
}

MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}

webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
    VideoCodecType type) {
  if (supported_codecs_.empty()) {
    return NULL;
  }
  for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
       it != supported_codecs_.end(); ++it) {
    if (it->type == type) {
      ALOGD("Create HW video encoder for type %d (%s).",
            (int)type, it->name.c_str());
      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
    }
  }
  return NULL;
}

const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
MediaCodecVideoEncoderFactory::codecs() const {
  return supported_codecs_;
}

void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
    webrtc::VideoEncoder* encoder) {
  ALOGD("Destroy video encoder.");
  delete encoder;
}

}  // namespace webrtc_jni