/*
 * libjingle
 * Copyright 2011 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/media/webrtc/webrtcvideoframe.h"

#include "libyuv/convert.h"
#include "libyuv/convert_from.h"
#include "libyuv/planar_functions.h"
#include "talk/base/logging.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"

namespace cricket {

static const int kWatermarkWidth = 8;
static const int kWatermarkHeight = 8;
static const int kWatermarkOffsetFromLeft = 8;
static const int kWatermarkOffsetFromBottom = 8;
static const unsigned char kWatermarkMaxYValue = 64;

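// FrameBuffer shares a single heap allocation between data_ and the embedded
// webrtc::VideoFrame: SetData() hands the memory to video_frame_ via Swap(),
// and the destructor swaps it back out so it is not freed twice.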
FrameBuffer::FrameBuffer() : length_(0) {}

FrameBuffer::FrameBuffer(size_t length) : length_(0) {
  char* buffer = new char[length];
  SetData(buffer, length);
}

FrameBuffer::~FrameBuffer() {
  // Make sure that video_frame_ doesn't delete the buffer, as it may be
  // shared between multiple WebRtcVideoFrames.
  uint8_t* new_memory = NULL;
  uint32_t new_length = 0;
  uint32_t new_size = 0;
  video_frame_.Swap(new_memory, new_length, new_size);
}

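// Takes ownership of |data| and also installs it as video_frame_'s buffer,
// so both point at the same memory.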
void FrameBuffer::SetData(char* data, size_t length) {
  data_.reset(data);
  length_ = length;
  uint8_t* new_memory = reinterpret_cast<uint8_t*>(data);
  uint32_t new_length = static_cast<uint32_t>(length);
  uint32_t new_size = static_cast<uint32_t>(length);
  video_frame_.Swap(new_memory, new_length, new_size);
}

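// Detaches the buffer from both data_ and video_frame_ and hands it back to
// the caller, which becomes responsible for freeing it.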
void FrameBuffer::ReturnData(char** data, size_t* length) {
  uint8_t* old_memory = NULL;
  uint32_t old_length = 0;
  uint32_t old_size = 0;
  video_frame_.Swap(old_memory, old_length, old_size);
  data_.release();
  length_ = 0;
  *length = old_length;
  *data = reinterpret_cast<char*>(old_memory);
}

char* FrameBuffer::data() { return data_.get(); }

size_t FrameBuffer::length() const { return length_; }

webrtc::VideoFrame* FrameBuffer::frame() { return &video_frame_; }

const webrtc::VideoFrame* FrameBuffer::frame() const { return &video_frame_; }

WebRtcVideoFrame::WebRtcVideoFrame()
    : video_buffer_(new RefCountedBuffer()), is_black_(false) {}

WebRtcVideoFrame::~WebRtcVideoFrame() {}

bool WebRtcVideoFrame::Init(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  return Reset(format, w, h, dw, dh, sample, sample_size, pixel_width,
               pixel_height, elapsed_time, time_stamp, rotation);
}

bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh) {
  return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
               static_cast<uint8*>(frame->data), frame->data_size,
               frame->pixel_width, frame->pixel_height, frame->elapsed_time,
               frame->time_stamp, frame->rotation);
}

bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
                                   size_t pixel_height, int64 elapsed_time,
                                   int64 time_stamp) {
  InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time, time_stamp);
  if (!is_black_) {
    return SetToBlack();
  }
  return true;
}

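// Wraps an externally allocated |buffer| in a new RefCountedBuffer without
// copying the pixel data; SetData() then takes it over as the buffer's
// backing store. Use Detach() below to take the memory back.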
void WebRtcVideoFrame::Attach(
    uint8* buffer, size_t buffer_size, int w, int h, size_t pixel_width,
    size_t pixel_height, int64 elapsed_time, int64 time_stamp, int rotation) {
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer());
  video_buffer->SetData(reinterpret_cast<char*>(buffer), buffer_size);
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, rotation);
}

void WebRtcVideoFrame::Detach(uint8** data, size_t* length) {
  video_buffer_->ReturnData(reinterpret_cast<char**>(data), length);
}

size_t WebRtcVideoFrame::GetWidth() const { return frame()->Width(); }

size_t WebRtcVideoFrame::GetHeight() const { return frame()->Height(); }

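// The plane accessors below treat the underlying buffer as one contiguous
// I420 image: a Width() * Height() Y plane followed by the U plane and then
// the V plane, each GetChromaSize() bytes.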
const uint8* WebRtcVideoFrame::GetYPlane() const {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

const uint8* WebRtcVideoFrame::GetUPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

const uint8* WebRtcVideoFrame::GetVPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = static_cast<int>(GetChromaSize());
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetYPlane() {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

uint8* WebRtcVideoFrame::GetUPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetVPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = static_cast<int>(GetChromaSize());
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

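// Makes a shallow copy: the new frame shares this frame's ref-counted pixel
// buffer. Call MakeExclusive() on the copy if it needs its own writable
// buffer.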
VideoFrame* WebRtcVideoFrame::Copy() const {
  const char* old_buffer = video_buffer_->data();
  if (!old_buffer)
    return NULL;
  size_t new_buffer_size = video_buffer_->length();

  WebRtcVideoFrame* ret_val = new WebRtcVideoFrame();
  ret_val->Attach(video_buffer_.get(), new_buffer_size, frame()->Width(),
                  frame()->Height(), pixel_width_, pixel_height_,
                  elapsed_time_, time_stamp_, rotation_);
  return ret_val;
}

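// Replaces the shared buffer with a private copy of the pixel data, so later
// writes to this frame cannot affect other frames sharing the old buffer.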
bool WebRtcVideoFrame::MakeExclusive() {
  const int length = static_cast<int>(video_buffer_->length());
  RefCountedBuffer* exclusive_buffer = new RefCountedBuffer(length);
  memcpy(exclusive_buffer->data(), video_buffer_->data(), length);
  Attach(exclusive_buffer, length, frame()->Width(), frame()->Height(),
         pixel_width_, pixel_height_, elapsed_time_, time_stamp_, rotation_);
  return true;
}

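// Returns the number of bytes the frame needs; the data is copied into
// |buffer| only when |size| is large enough to hold it.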
size_t WebRtcVideoFrame::CopyToBuffer(uint8* buffer, size_t size) const {
  if (!frame()->Buffer()) {
    return 0;
  }

  size_t needed = frame()->Length();
  if (needed <= size) {
    memcpy(buffer, frame()->Buffer(), needed);
  }
  return needed;
}

// TODO(fbarchard): Refactor into base class and share with lmi
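// Returns the destination size required for the conversion
// (|stride_rgb| * height, using the absolute stride). Nothing is written if
// |size| is too small, and 0 is returned if the conversion itself fails.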
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32 to_fourcc, uint8* buffer,
                                            size_t size,
                                            int stride_rgb) const {
  if (!frame()->Buffer()) {
    return 0;
  }
  size_t width = frame()->Width();
  size_t height = frame()->Height();
  size_t needed = (stride_rgb >= 0 ? stride_rgb : -stride_rgb) * height;
  if (size < needed) {
    LOG(LS_WARNING) << "RGB buffer is not large enough";
    return needed;
  }

  if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(),
                              GetUPitch(), GetVPlane(), GetVPitch(), buffer,
                              stride_rgb,
                              static_cast<int>(width),
                              static_cast<int>(height),
                              to_fourcc)) {
    LOG(LS_WARNING) << "RGB type not supported: " << to_fourcc;
    return 0;  // 0 indicates error
  }
  return needed;
}

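// Adopts |video_buffer| (taking a reference) and updates the frame's
// dimensions and metadata; no pixel data is copied.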
void WebRtcVideoFrame::Attach(
    RefCountedBuffer* video_buffer, size_t buffer_size, int w, int h,
    size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp, int rotation) {
  if (video_buffer_.get() == video_buffer) {
    return;
  }
  is_black_ = false;
  video_buffer_ = video_buffer;
  frame()->SetWidth(w);
  frame()->SetHeight(h);
  pixel_width_ = pixel_width;
  pixel_height_ = pixel_height;
  elapsed_time_ = elapsed_time;
  time_stamp_ = time_stamp;
  rotation_ = rotation;
}

// Adds a square watermark near the lower-left corner by clamping Y values.
// Returns false on error.
bool WebRtcVideoFrame::AddWatermark() {
  size_t w = GetWidth();
  size_t h = GetHeight();

  if (w < kWatermarkWidth + kWatermarkOffsetFromLeft ||
      h < kWatermarkHeight + kWatermarkOffsetFromBottom) {
    return false;
  }

  uint8* buffer = GetYPlane();
  for (size_t x = kWatermarkOffsetFromLeft;
       x < kWatermarkOffsetFromLeft + kWatermarkWidth; ++x) {
    for (size_t y = h - kWatermarkOffsetFromBottom - kWatermarkHeight;
         y < h - kWatermarkOffsetFromBottom; ++y) {
      buffer[y * w + x] =
          talk_base::_min(buffer[y * w + x], kWatermarkMaxYValue);
    }
  }
  return true;
}

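// Converts |sample| into this frame's own I420 buffer: validate the input,
// canonicalize the FourCC, allocate a buffer sized for the (possibly
// rotation-swapped) display dimensions, then let libyuv::ConvertToI420 crop,
// rotate and convert in a single pass.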
bool WebRtcVideoFrame::Reset(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  if (!Validate(format, w, h, sample, sample_size)) {
    return false;
  }
  // Translate aliases to standard enums (e.g., IYUV -> I420).
  format = CanonicalFourCC(format);

  // Round the display width and height down to a multiple of 4, to avoid a
  // webrtc size-calculation error on odd sizes.
  // TODO(Ronghua): Remove this once the webrtc allocator is fixed.
  dw = (dw > 4) ? (dw & ~3) : dw;
  dh = (dh > 4) ? (dh & ~3) : dh;

  // Set up a new buffer.
  // TODO(fbarchard): Support lazy allocation.
  int new_width = dw;
  int new_height = dh;
  if (rotation == 90 || rotation == 270) {  // If rotated, swap width, height.
    new_width = dh;
    new_height = dw;
  }

  size_t desired_size = SizeOf(new_width, new_height);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(desired_size));
  // libyuv::ConvertToI420 handles the rotation, so the new frame's rotation
  // should always be 0.
  Attach(video_buffer.get(), desired_size, new_width, new_height, pixel_width,
         pixel_height, elapsed_time, time_stamp, 0);

  int horiz_crop = ((w - dw) / 2) & ~1;
  // ARGB on Windows has negative height.
  // The sample's layout in memory is normal, so just correct the crop.
  int vert_crop = ((abs(h) - dh) / 2) & ~1;
  // Conversion functions expect a negative height to flip the image.
  int idh = (h < 0) ? -dh : dh;
  uint8* y = GetYPlane();
  int y_stride = GetYPitch();
  uint8* u = GetUPlane();
  int u_stride = GetUPitch();
  uint8* v = GetVPlane();
  int v_stride = GetVPitch();
  int r = libyuv::ConvertToI420(
      sample, sample_size, y, y_stride, u, u_stride, v, v_stride, horiz_crop,
      vert_crop, w, h, dw, idh, static_cast<libyuv::RotationMode>(rotation),
      format);
  if (r) {
    LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format)
                  << " return code : " << r;
    return false;
  }
  return true;
}

VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
    int w, int h, size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp) const {
  WebRtcVideoFrame* frame = new WebRtcVideoFrame();
  frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time,
                           time_stamp);
  return frame;
}

void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
                                         size_t pixel_height,
                                         int64 elapsed_time,
                                         int64 time_stamp) {
  size_t buffer_size = VideoFrame::SizeOf(w, h);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(buffer_size));
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, 0);
}

}  // namespace cricket