/*
 * libjingle
 * Copyright 2011 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/media/webrtc/webrtcvideoframe.h"

#include "libyuv/convert.h"
#include "libyuv/convert_from.h"
#include "libyuv/planar_functions.h"
#include "talk/base/logging.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"

namespace cricket {

static const int kWatermarkWidth = 8;
static const int kWatermarkHeight = 8;
static const int kWatermarkOffsetFromLeft = 8;
static const int kWatermarkOffsetFromBottom = 8;
static const unsigned char kWatermarkMaxYValue = 64;

FrameBuffer::FrameBuffer() : length_(0) {}

FrameBuffer::FrameBuffer(size_t length) : length_(0) {
  char* buffer = new char[length];
  SetData(buffer, length);
}

FrameBuffer::~FrameBuffer() {
  // Make sure that video_frame_ doesn't delete the buffer, as it may be
  // shared between multiple WebRtcVideoFrame instances.
  uint8_t* new_memory = NULL;
  uint32_t new_length = 0;
  uint32_t new_size = 0;
  video_frame_.Swap(new_memory, new_length, new_size);
}

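// Takes ownership of |data| and aliases it into the wrapped
// webrtc::VideoFrame via Swap(), so no pixel data is copied.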
void FrameBuffer::SetData(char* data, size_t length) {
  data_.reset(data);
  length_ = length;
  uint8_t* new_memory = reinterpret_cast<uint8_t*>(data);
  uint32_t new_length = length;
  uint32_t new_size = length;
  video_frame_.Swap(new_memory, new_length, new_size);
}

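// Detaches the buffer from the wrapped webrtc::VideoFrame and hands
// ownership back to the caller through |data| and |length|.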
void FrameBuffer::ReturnData(char** data, size_t* length) {
  uint8_t* old_memory = NULL;
  uint32_t old_length = 0;
  uint32_t old_size = 0;
  video_frame_.Swap(old_memory, old_length, old_size);
  data_.release();
  length_ = 0;
  *length = old_length;
  *data = reinterpret_cast<char*>(old_memory);
}

char* FrameBuffer::data() { return data_.get(); }

size_t FrameBuffer::length() const { return length_; }

webrtc::VideoFrame* FrameBuffer::frame() { return &video_frame_; }

const webrtc::VideoFrame* FrameBuffer::frame() const { return &video_frame_; }

WebRtcVideoFrame::WebRtcVideoFrame()
    : video_buffer_(new RefCountedBuffer()), is_black_(false) {}

WebRtcVideoFrame::~WebRtcVideoFrame() {}

bool WebRtcVideoFrame::Init(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  return Reset(format, w, h, dw, dh, sample, sample_size, pixel_width,
               pixel_height, elapsed_time, time_stamp, rotation);
}

bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh) {
  return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
               static_cast<uint8*>(frame->data), frame->data_size,
               frame->pixel_width, frame->pixel_height, frame->elapsed_time,
               frame->time_stamp, frame->rotation);
}

bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
                                   size_t pixel_height, int64 elapsed_time,
                                   int64 time_stamp) {
  InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time, time_stamp);
  if (!is_black_) {
    return SetToBlack();
  }
  return true;
}

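// Wraps |buffer| in a new ref-counted FrameBuffer (which takes ownership of
// the memory) and refreshes the frame metadata; no pixel data is copied.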
void WebRtcVideoFrame::Attach(
    uint8* buffer, size_t buffer_size, int w, int h, size_t pixel_width,
    size_t pixel_height, int64 elapsed_time, int64 time_stamp, int rotation) {
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer());
  video_buffer->SetData(reinterpret_cast<char*>(buffer), buffer_size);
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, rotation);
}

void WebRtcVideoFrame::Detach(uint8** data, size_t* length) {
  video_buffer_->ReturnData(reinterpret_cast<char**>(data), length);
}

size_t WebRtcVideoFrame::GetWidth() const { return frame()->Width(); }

size_t WebRtcVideoFrame::GetHeight() const { return frame()->Height(); }

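// The wrapped buffer holds a contiguous I420 image: a full-resolution Y plane
// followed by quarter-resolution U and V planes, so the chroma planes are
// found by offsetting into the same buffer.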
const uint8* WebRtcVideoFrame::GetYPlane() const {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

const uint8* WebRtcVideoFrame::GetUPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

const uint8* WebRtcVideoFrame::GetVPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = GetChromaSize();
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetYPlane() {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

uint8* WebRtcVideoFrame::GetUPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetVPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = GetChromaSize();
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

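// Copy() is shallow: the new frame shares this frame's ref-counted buffer.
// MakeExclusive() below replaces the shared buffer with a private copy of the
// pixel data.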
VideoFrame* WebRtcVideoFrame::Copy() const {
  const char* old_buffer = video_buffer_->data();
  if (!old_buffer)
    return NULL;
  size_t new_buffer_size = video_buffer_->length();

  WebRtcVideoFrame* ret_val = new WebRtcVideoFrame();
  ret_val->Attach(video_buffer_.get(), new_buffer_size, frame()->Width(),
                  frame()->Height(), pixel_width_, pixel_height_,
                  elapsed_time_, time_stamp_, rotation_);
  return ret_val;
}

bool WebRtcVideoFrame::MakeExclusive() {
  const int length = video_buffer_->length();
  RefCountedBuffer* exclusive_buffer = new RefCountedBuffer(length);
  memcpy(exclusive_buffer->data(), video_buffer_->data(), length);
  Attach(exclusive_buffer, length, frame()->Width(), frame()->Height(),
         pixel_width_, pixel_height_, elapsed_time_, time_stamp_, rotation_);
  return true;
}

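// Copies the raw I420 contents into |buffer| when |size| is large enough;
// returns the number of bytes required (0 if the frame is empty) so callers
// can resize and retry.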
size_t WebRtcVideoFrame::CopyToBuffer(uint8* buffer, size_t size) const {
  if (!frame()->Buffer()) {
    return 0;
  }

  size_t needed = frame()->Length();
  if (needed <= size) {
    memcpy(buffer, frame()->Buffer(), needed);
  }
  return needed;
}

// TODO(fbarchard): Refactor into base class and share with lmi
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32 to_fourcc, uint8* buffer,
                                            size_t size,
                                            int stride_rgb) const {
  if (!frame()->Buffer()) {
    return 0;
  }
  size_t width = frame()->Width();
  size_t height = frame()->Height();
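  // A negative stride denotes a vertically flipped (bottom-up) RGB layout;
  // either way the destination needs |stride_rgb| * height bytes.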
  size_t needed = (stride_rgb >= 0 ? stride_rgb : -stride_rgb) * height;
  if (size < needed) {
    LOG(LS_WARNING) << "RGB buffer is not large enough";
    return needed;
  }

  if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(),
                              GetUPitch(), GetVPlane(), GetVPitch(), buffer,
                              stride_rgb, width, height, to_fourcc)) {
    LOG(LS_WARNING) << "RGB type not supported: " << to_fourcc;
    return 0;  // 0 indicates error
  }
  return needed;
}

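// Takes a reference to |video_buffer| and refreshes the frame metadata.
// Attaching the buffer this frame already wraps is a no-op.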
void WebRtcVideoFrame::Attach(
    RefCountedBuffer* video_buffer, size_t buffer_size, int w, int h,
    size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp, int rotation) {
  if (video_buffer_.get() == video_buffer) {
    return;
  }
  is_black_ = false;
  video_buffer_ = video_buffer;
  frame()->SetWidth(w);
  frame()->SetHeight(h);
  pixel_width_ = pixel_width;
  pixel_height_ = pixel_height;
  elapsed_time_ = elapsed_time;
  time_stamp_ = time_stamp;
  rotation_ = rotation;
}

// Add a square watermark near the lower-left corner by clamping luma (Y)
// values. Returns false on error.
bool WebRtcVideoFrame::AddWatermark() {
  size_t w = GetWidth();
  size_t h = GetHeight();

  if (w < kWatermarkWidth + kWatermarkOffsetFromLeft ||
      h < kWatermarkHeight + kWatermarkOffsetFromBottom) {
    return false;
  }

  uint8* buffer = GetYPlane();
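  // Darken an 8x8 block by clamping its luma samples; the Y plane is indexed
  // here as densely packed rows of w bytes (stride assumed equal to width).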
  for (size_t x = kWatermarkOffsetFromLeft;
       x < kWatermarkOffsetFromLeft + kWatermarkWidth; ++x) {
    for (size_t y = h - kWatermarkOffsetFromBottom - kWatermarkHeight;
         y < h - kWatermarkOffsetFromBottom; ++y) {
      buffer[y * w + x] =
          talk_base::_min(buffer[y * w + x], kWatermarkMaxYValue);
    }
  }
  return true;
}

bool WebRtcVideoFrame::Reset(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  if (!Validate(format, w, h, sample, sample_size)) {
    return false;
  }
  // Translate aliases to standard enums (e.g., IYUV -> I420).
  format = CanonicalFourCC(format);

  // Round the display width and height down to a multiple of 4, to avoid a
  // webrtc size calculation error on odd sizes.
  // TODO(Ronghua): Remove this once the webrtc allocator is fixed.
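  // Example: dw = 177 rounds down to 176 and dh = 99 to 96, while values of 4
  // or less pass through unchanged.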
  dw = (dw > 4) ? (dw & ~3) : dw;
  dh = (dh > 4) ? (dh & ~3) : dh;

  // Set up a new buffer.
  // TODO(fbarchard): Support lazy allocation.
  int new_width = dw;
  int new_height = dh;
  if (rotation == 90 || rotation == 270) {  // Swap dimensions if rotated.
    new_width = dh;
    new_height = dw;
  }

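  // SizeOf() yields the I420 footprint for the new dimensions (one full-size
  // luma plane plus two quarter-size chroma planes).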
  size_t desired_size = SizeOf(new_width, new_height);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(desired_size));
  // Since libyuv::ConvertToI420 handles the rotation below, the new frame's
  // rotation is always set to 0.
  Attach(video_buffer.get(), desired_size, new_width, new_height, pixel_width,
         pixel_height, elapsed_time, time_stamp, 0);

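  // Center-crop the source from w x h down to dw x dh. The crop offsets are
  // rounded down to even values so the 4:2:0 chroma planes stay aligned with
  // the luma plane.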
  int horiz_crop = ((w - dw) / 2) & ~1;
  // ARGB on Windows has negative height.
  // The sample's layout in memory is normal, so just correct the crop.
  int vert_crop = ((abs(h) - dh) / 2) & ~1;
  // Conversion functions expect a negative height to flip the image.
  int idh = (h < 0) ? -dh : dh;
  uint8* y = GetYPlane();
  int y_stride = GetYPitch();
  uint8* u = GetUPlane();
  int u_stride = GetUPitch();
  uint8* v = GetVPlane();
  int v_stride = GetVPitch();
  int r = libyuv::ConvertToI420(
      sample, sample_size, y, y_stride, u, u_stride, v, v_stride, horiz_crop,
      vert_crop, w, h, dw, idh, static_cast<libyuv::RotationMode>(rotation),
      format);
  if (r) {
    LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format)
                  << " return code : " << r;
    return false;
  }
  return true;
}

VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
    int w, int h, size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp) const {
  WebRtcVideoFrame* frame = new WebRtcVideoFrame();
  frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time,
                           time_stamp);
  return frame;
}

void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
                                         size_t pixel_height,
                                         int64 elapsed_time, int64 time_stamp) {
  size_t buffer_size = VideoFrame::SizeOf(w, h);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(buffer_size));
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, 0);
}

}  // namespace cricket