/*
 * libjingle
 * Copyright 2011 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/media/webrtc/webrtcvideoframe.h"

#include "libyuv/convert.h"
#include "libyuv/convert_from.h"
#include "libyuv/planar_functions.h"
#include "talk/base/logging.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"

namespace cricket {

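// Parameters for the watermark that AddWatermark() stamps into the luma
// plane: an 8x8 pixel square, 8 pixels in from the left edge and 8 pixels up
// from the bottom, with Y values clamped to at most kWatermarkMaxYValue.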
static const int kWatermarkWidth = 8;
static const int kWatermarkHeight = 8;
static const int kWatermarkOffsetFromLeft = 8;
static const int kWatermarkOffsetFromBottom = 8;
static const unsigned char kWatermarkMaxYValue = 64;

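// FrameBuffer is a thin wrapper around webrtc::VideoFrame that exposes the
// frame's raw storage as a char buffer and moves ownership of that storage in
// and out via Swap().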
FrameBuffer::FrameBuffer() {}

FrameBuffer::FrameBuffer(size_t length) {
  char* buffer = new char[length];
  SetData(buffer, length);
}

FrameBuffer::~FrameBuffer() {}

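// Hands |data| (|length| bytes) to |video_frame_|, which takes ownership of
// the buffer via Swap().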
void FrameBuffer::SetData(char* data, size_t length) {
  uint8_t* new_memory = reinterpret_cast<uint8_t*>(data);
  uint32_t new_length = static_cast<uint32_t>(length);
  uint32_t new_size = static_cast<uint32_t>(length);
  video_frame_.Swap(new_memory, new_length, new_size);
}

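// Releases the underlying storage back to the caller: on return, |*data|
// points at the frame's old buffer and |*length| holds its length, while the
// frame itself is left empty.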
void FrameBuffer::ReturnData(char** data, size_t* length) {
  *data = NULL;
  uint32_t old_length = 0;
  uint32_t old_size = 0;
  video_frame_.Swap(reinterpret_cast<uint8_t*&>(*data),
                    old_length, old_size);
  *length = old_length;
}

char* FrameBuffer::data() {
  return reinterpret_cast<char*>(video_frame_.Buffer());
}

size_t FrameBuffer::length() const {
  return static_cast<size_t>(video_frame_.Length());
}

webrtc::VideoFrame* FrameBuffer::frame() { return &video_frame_; }

const webrtc::VideoFrame* FrameBuffer::frame() const { return &video_frame_; }

WebRtcVideoFrame::WebRtcVideoFrame()
    : video_buffer_(new RefCountedBuffer()), is_black_(false) {}

WebRtcVideoFrame::~WebRtcVideoFrame() {}

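// Both Init() overloads delegate to Reset(), which converts the sample to
// I420 and applies any cropping and rotation. A minimal usage sketch,
// assuming |captured| is a valid CapturedFrame delivered by a VideoCapturer:
//   WebRtcVideoFrame frame;
//   frame.Init(captured, captured->width, captured->height);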
bool WebRtcVideoFrame::Init(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  return Reset(format, w, h, dw, dh, sample, sample_size, pixel_width,
               pixel_height, elapsed_time, time_stamp, rotation);
}

bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh) {
  return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
               static_cast<uint8*>(frame->data), frame->data_size,
               frame->pixel_width, frame->pixel_height, frame->elapsed_time,
               frame->time_stamp, frame->rotation);
}

bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
                                   size_t pixel_height, int64 elapsed_time,
                                   int64 time_stamp) {
  InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time, time_stamp);
  if (!is_black_) {
    return SetToBlack();
  }
  return true;
}

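// Wraps an externally allocated |buffer| in a new RefCountedBuffer and
// attaches it; the frame takes ownership of the buffer.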
void WebRtcVideoFrame::Attach(
    uint8* buffer, size_t buffer_size, int w, int h, size_t pixel_width,
    size_t pixel_height, int64 elapsed_time, int64 time_stamp, int rotation) {
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer());
  video_buffer->SetData(reinterpret_cast<char*>(buffer), buffer_size);
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, rotation);
}

void WebRtcVideoFrame::Detach(uint8** data, size_t* length) {
  video_buffer_->ReturnData(reinterpret_cast<char**>(data), length);
}

size_t WebRtcVideoFrame::GetWidth() const { return frame()->Width(); }

size_t WebRtcVideoFrame::GetHeight() const { return frame()->Height(); }

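// The pixel data is stored as contiguous I420: the Y plane starts at the
// beginning of the buffer, the U plane follows the width * height luma bytes,
// and the V plane follows the U plane by GetChromaSize() bytes.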
const uint8* WebRtcVideoFrame::GetYPlane() const {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

const uint8* WebRtcVideoFrame::GetUPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

const uint8* WebRtcVideoFrame::GetVPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = static_cast<int>(GetChromaSize());
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetYPlane() {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

uint8* WebRtcVideoFrame::GetUPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetVPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = static_cast<int>(GetChromaSize());
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

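// Makes a shallow copy: the new frame shares the same ref-counted buffer, so
// no pixel data is copied. Call MakeExclusive() on the copy before writing
// to it.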
VideoFrame* WebRtcVideoFrame::Copy() const {
  const char* old_buffer = video_buffer_->data();
  if (!old_buffer)
    return NULL;
  size_t new_buffer_size = video_buffer_->length();

  WebRtcVideoFrame* ret_val = new WebRtcVideoFrame();
  ret_val->Attach(video_buffer_.get(), new_buffer_size, frame()->Width(),
                  frame()->Height(), pixel_width_, pixel_height_,
                  elapsed_time_, time_stamp_, rotation_);
  return ret_val;
}

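// Replaces the shared buffer with a private copy of the pixel data so that
// subsequent writes do not affect other frames referencing the original
// buffer.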
bool WebRtcVideoFrame::MakeExclusive() {
  const int length = static_cast<int>(video_buffer_->length());
  RefCountedBuffer* exclusive_buffer = new RefCountedBuffer(length);
  memcpy(exclusive_buffer->data(), video_buffer_->data(), length);
  Attach(exclusive_buffer, length, frame()->Width(), frame()->Height(),
         pixel_width_, pixel_height_, elapsed_time_, time_stamp_, rotation_);
  return true;
}

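// Copies the frame into |buffer| if |size| is large enough; always returns
// the number of bytes required.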
size_t WebRtcVideoFrame::CopyToBuffer(uint8* buffer, size_t size) const {
  if (!frame()->Buffer()) {
    return 0;
  }

  size_t needed = frame()->Length();
  if (needed <= size) {
    memcpy(buffer, frame()->Buffer(), needed);
  }
  return needed;
}

// TODO(fbarchard): Refactor into base class and share with lmi.
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32 to_fourcc, uint8* buffer,
                                            size_t size,
                                            int stride_rgb) const {
  if (!frame()->Buffer()) {
    return 0;
  }
  size_t width = frame()->Width();
  size_t height = frame()->Height();
  size_t needed = (stride_rgb >= 0 ? stride_rgb : -stride_rgb) * height;
  if (size < needed) {
    LOG(LS_WARNING) << "RGB buffer is not large enough";
    return needed;
  }

  if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(),
                              GetUPitch(), GetVPlane(), GetVPitch(), buffer,
                              stride_rgb,
                              static_cast<int>(width),
                              static_cast<int>(height),
                              to_fourcc)) {
    LOG(LS_WARNING) << "RGB type not supported: " << to_fourcc;
    return 0;  // 0 indicates error.
  }
  return needed;
}

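// Takes a reference to |video_buffer| and updates the frame's metadata; no
// pixel data is copied. Attaching the buffer the frame already holds is a
// no-op.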
void WebRtcVideoFrame::Attach(
    RefCountedBuffer* video_buffer, size_t buffer_size, int w, int h,
    size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp, int rotation) {
  if (video_buffer_.get() == video_buffer) {
    return;
  }
  is_black_ = false;
  video_buffer_ = video_buffer;
  frame()->SetWidth(w);
  frame()->SetHeight(h);
  pixel_width_ = pixel_width;
  pixel_height_ = pixel_height;
  elapsed_time_ = elapsed_time;
  time_stamp_ = time_stamp;
  rotation_ = rotation;
}

// Adds a square watermark near the lower-left corner by clamping Y values.
// Returns false if the frame is too small to hold the watermark.
bool WebRtcVideoFrame::AddWatermark() {
  size_t w = GetWidth();
  size_t h = GetHeight();

  if (w < kWatermarkWidth + kWatermarkOffsetFromLeft ||
      h < kWatermarkHeight + kWatermarkOffsetFromBottom) {
    return false;
  }

  uint8* buffer = GetYPlane();
  for (size_t x = kWatermarkOffsetFromLeft;
       x < kWatermarkOffsetFromLeft + kWatermarkWidth; ++x) {
    for (size_t y = h - kWatermarkOffsetFromBottom - kWatermarkHeight;
         y < h - kWatermarkOffsetFromBottom; ++y) {
      buffer[y * w + x] =
          talk_base::_min(buffer[y * w + x], kWatermarkMaxYValue);
    }
  }
  return true;
}

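// Converts |sample| (any supported FourCC) into this frame's I420 buffer,
// cropping to the display size and applying |rotation| via
// libyuv::ConvertToI420.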
bool WebRtcVideoFrame::Reset(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  if (!Validate(format, w, h, sample, sample_size)) {
    return false;
  }
  // Translate aliases to standard enums (e.g., IYUV -> I420).
  format = CanonicalFourCC(format);

  // Round the display width and height down to a multiple of 4, to avoid
  // webrtc size calculation errors on odd sizes.
  // TODO(Ronghua): Remove this once the webrtc allocator is fixed.
  dw = (dw > 4) ? (dw & ~3) : dw;
  dh = (dh > 4) ? (dh & ~3) : dh;

  // Set up a new buffer.
  // TODO(fbarchard): Support lazy allocation.
  int new_width = dw;
  int new_height = dh;
  if (rotation == 90 || rotation == 270) {  // If rotated, swap width and height.
    new_width = dh;
    new_height = dw;
  }

  size_t desired_size = SizeOf(new_width, new_height);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(desired_size));
  // libyuv::ConvertToI420 handles the rotation below, so the new frame's
  // rotation is always 0.
  Attach(video_buffer.get(), desired_size, new_width, new_height, pixel_width,
         pixel_height, elapsed_time, time_stamp, 0);

  int horiz_crop = ((w - dw) / 2) & ~1;
  // ARGB on Windows has negative height.
  // The sample's memory layout is normal, so just correct the crop.
  int vert_crop = ((abs(h) - dh) / 2) & ~1;
  // Conversion functions expect negative height to flip the image.
  int idh = (h < 0) ? -dh : dh;
  uint8* y = GetYPlane();
  int y_stride = GetYPitch();
  uint8* u = GetUPlane();
  int u_stride = GetUPitch();
  uint8* v = GetVPlane();
  int v_stride = GetVPitch();
  int r = libyuv::ConvertToI420(
      sample, sample_size, y, y_stride, u, u_stride, v, v_stride, horiz_crop,
      vert_crop, w, h, dw, idh, static_cast<libyuv::RotationMode>(rotation),
      format);
  if (r) {
    LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format)
                  << " return code: " << r;
    return false;
  }
  return true;
}

VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
    int w, int h, size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp) const {
  WebRtcVideoFrame* frame = new WebRtcVideoFrame();
  frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time,
                           time_stamp);
  return frame;
}

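// Attaches a newly allocated buffer of VideoFrame::SizeOf(w, h) bytes with
// rotation 0. The pixel contents are left uninitialized; use InitToBlack()
// for a black frame.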
void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
                                         size_t pixel_height,
                                         int64 elapsed_time, int64 time_stamp) {
  size_t buffer_size = VideoFrame::SizeOf(w, h);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(buffer_size));
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, 0);
}

}  // namespace cricket