// Copyright 2013 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// Helper structs and methods for gif2webp tool.
//
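// Typical usage, as driven by the gif2webp tool (a sketch; 'config', 'mux' and
// the GIF decoding loop are assumed to be provided by the caller):
//
//   WebPFrameCache* const cache =
//       WebPFrameCacheNew(width, height, kmin, kmax, allow_mixed);
//   while (/* there are more GIF frames */) {
//     // ... decode the next GIF frame into 'frame', 'rect' and 'info' ...
//     if (!WebPFrameCacheAddFrame(cache, &config, &rect, &frame, &info)) break;
//     if (WebPFrameCacheFlush(cache, verbose, mux) != WEBP_MUX_OK) break;
//   }
//   WebPFrameCacheFlushAll(cache, verbose, mux);
//   WebPFrameCacheDelete(cache);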

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>   // for calloc(), free(), malloc()
#include <string.h>   // for memcpy(), memset()

#include "webp/encode.h"
#include "./gif2webp_util.h"

#define DELTA_INFINITY (1ULL << 32)
#define KEYFRAME_NONE (-1)
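// DELTA_INFINITY exceeds any possible difference between a key frame and a
// sub-frame bitstream size (sizes fit in 32 bits), so it acts as an "unset"
// value for 'best_delta' below. KEYFRAME_NONE means that no key frame
// candidate is currently selected in the cache.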

//------------------------------------------------------------------------------
// Helper utilities.

static void ClearRectangle(WebPPicture* const picture,
                           int left, int top, int width, int height) {
  int j;
  for (j = top; j < top + height; ++j) {
    uint32_t* const dst = picture->argb + j * picture->argb_stride;
    int i;
    for (i = left; i < left + width; ++i) {
      dst[i] = WEBP_UTIL_TRANSPARENT_COLOR;
    }
  }
}

void WebPUtilClearPic(WebPPicture* const picture,
                      const WebPFrameRect* const rect) {
  if (rect != NULL) {
    ClearRectangle(picture, rect->x_offset, rect->y_offset,
                   rect->width, rect->height);
  } else {
    ClearRectangle(picture, 0, 0, picture->width, picture->height);
  }
}

// TODO: Also used in picture.c. Move to a common location?
// Copy width x height pixels from 'src' to 'dst' honoring the strides.
static void CopyPlane(const uint8_t* src, int src_stride,
                      uint8_t* dst, int dst_stride, int width, int height) {
  while (height-- > 0) {
    memcpy(dst, src, width);
    src += src_stride;
    dst += dst_stride;
  }
}

// Copy pixels from 'src' to 'dst' honoring strides. 'src' and 'dst' are
// assumed to be already allocated.
static void CopyPixels(const WebPPicture* const src, WebPPicture* const dst) {
  assert(src->width == dst->width && src->height == dst->height);
  CopyPlane((uint8_t*)src->argb, 4 * src->argb_stride, (uint8_t*)dst->argb,
            4 * dst->argb_stride, 4 * src->width, src->height);
}

// Given 'src' picture and its frame rectangle 'rect', blend it into 'dst'.
static void BlendPixels(const WebPPicture* const src,
                        const WebPFrameRect* const rect,
                        WebPPicture* const dst) {
  int j;
  assert(src->width == dst->width && src->height == dst->height);
  for (j = rect->y_offset; j < rect->y_offset + rect->height; ++j) {
    int i;
    for (i = rect->x_offset; i < rect->x_offset + rect->width; ++i) {
      const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
      const int src_alpha = src_pixel >> 24;
      if (src_alpha != 0) {
        dst->argb[j * dst->argb_stride + i] = src_pixel;
      }
    }
  }
}

// Replace fully transparent pixels within 'rect' of 'dst' by the corresponding
// fully opaque pixels of 'src'.
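// Because each frame is later blended onto the canvas, a transparent pixel and
// an opaque pixel carrying the color already present on the canvas composite
// to the same result, while the opaque variant avoids hard alpha edges and
// tends to compress better in lossy mode (see the caller, SetFrame()).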
static void ReduceTransparency(const WebPPicture* const src,
                               const WebPFrameRect* const rect,
                               WebPPicture* const dst) {
  int i, j;
  assert(src != NULL && dst != NULL && rect != NULL);
  assert(src->width == dst->width && src->height == dst->height);
  for (j = rect->y_offset; j < rect->y_offset + rect->height; ++j) {
    for (i = rect->x_offset; i < rect->x_offset + rect->width; ++i) {
      const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
      const int src_alpha = src_pixel >> 24;
      const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
      const int dst_alpha = dst_pixel >> 24;
      if (dst_alpha == 0 && src_alpha == 0xff) {
        dst->argb[j * dst->argb_stride + i] = src_pixel;
      }
    }
  }
}

// Replace similar blocks of pixels by a 'see-through' transparent block
// with uniform average color.
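// An 8x8 block whose pixels are all fully opaque and identical to the
// corresponding pixels of 'src' (the previous canvas at the call site) can be
// made fully transparent without changing the composited animation, since the
// canvas underneath already shows the same pixels; its RGB channels keep the
// block average so the lossy encoder sees smooth data rather than arbitrary
// values.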
static void FlattenSimilarBlocks(const WebPPicture* const src,
                                 const WebPFrameRect* const rect,
                                 WebPPicture* const dst) {
  int i, j;
  const int block_size = 8;
  const int y_start = (rect->y_offset + block_size) & ~(block_size - 1);
  const int y_end = (rect->y_offset + rect->height) & ~(block_size - 1);
  const int x_start = (rect->x_offset + block_size) & ~(block_size - 1);
  const int x_end = (rect->x_offset + rect->width) & ~(block_size - 1);
  assert(src != NULL && dst != NULL && rect != NULL);
  assert(src->width == dst->width && src->height == dst->height);
  assert((block_size & (block_size - 1)) == 0);  // must be a power of 2
  // Iterate over each block and count similar pixels.
  for (j = y_start; j < y_end; j += block_size) {
    for (i = x_start; i < x_end; i += block_size) {
      int cnt = 0;
      int avg_r = 0, avg_g = 0, avg_b = 0;
      int x, y;
      const uint32_t* const psrc = src->argb + j * src->argb_stride + i;
      uint32_t* const pdst = dst->argb + j * dst->argb_stride + i;
      for (y = 0; y < block_size; ++y) {
        for (x = 0; x < block_size; ++x) {
          const uint32_t src_pixel = psrc[x + y * src->argb_stride];
          const int alpha = src_pixel >> 24;
          if (alpha == 0xff &&
              src_pixel == pdst[x + y * dst->argb_stride]) {
            ++cnt;
            avg_r += (src_pixel >> 16) & 0xff;
            avg_g += (src_pixel >> 8) & 0xff;
            avg_b += (src_pixel >> 0) & 0xff;
          }
        }
      }
      // If we have a fully similar block, we replace it with an
      // average transparent block. This compresses better in lossy mode.
      if (cnt == block_size * block_size) {
        const uint32_t color = (0x00 << 24) |
                               ((avg_r / cnt) << 16) |
                               ((avg_g / cnt) << 8) |
                               ((avg_b / cnt) << 0);
        for (y = 0; y < block_size; ++y) {
          for (x = 0; x < block_size; ++x) {
            pdst[x + y * dst->argb_stride] = color;
          }
        }
      }
    }
  }
}

//------------------------------------------------------------------------------
// Key frame related utilities.

// Returns true if 'curr' frame with frame rectangle 'curr_rect' is a key
// frame, that is, it can be decoded independently of 'prev' canvas.
static int IsKeyFrame(const WebPPicture* const curr,
                      const WebPFrameRect* const curr_rect,
                      const WebPPicture* const prev) {
  int i, j;
  int is_key_frame = 1;

  // If previous canvas (with previous frame disposed) is all transparent,
  // current frame is a key frame.
  for (i = 0; i < prev->width; ++i) {
    for (j = 0; j < prev->height; ++j) {
      const uint32_t prev_alpha = (prev->argb[j * prev->argb_stride + i]) >> 24;
      if (prev_alpha != 0) {
        is_key_frame = 0;
        break;
      }
    }
    if (!is_key_frame) break;
  }
  if (is_key_frame) return 1;

  // If current frame covers the whole canvas and does not contain any
  // transparent pixels that depend on previous canvas, then current frame is
  // a key frame.
  if (curr_rect->width == curr->width && curr_rect->height == curr->height) {
    assert(curr_rect->x_offset == 0 && curr_rect->y_offset == 0);
    is_key_frame = 1;
    for (j = 0; j < prev->height; ++j) {
      for (i = 0; i < prev->width; ++i) {
        const uint32_t prev_alpha =
            (prev->argb[j * prev->argb_stride + i]) >> 24;
        const uint32_t curr_alpha =
            (curr->argb[j * curr->argb_stride + i]) >> 24;
        if (curr_alpha != 0xff && prev_alpha != 0) {
          is_key_frame = 0;
          break;
        }
      }
      if (!is_key_frame) break;
    }
    if (is_key_frame) return 1;
  }

  return 0;
}

// Given 'prev' frame and current frame rectangle 'rect', convert 'curr' frame
// to a key frame.
static void ConvertToKeyFrame(const WebPPicture* const prev,
                              WebPFrameRect* const rect,
                              WebPPicture* const curr) {
  int j;
  assert(curr->width == prev->width && curr->height == prev->height);

  // Replace transparent pixels of current canvas with those from previous
  // canvas (with previous frame disposed).
  for (j = 0; j < curr->height; ++j) {
    int i;
    for (i = 0; i < curr->width; ++i) {
      uint32_t* const curr_pixel = curr->argb + j * curr->argb_stride + i;
      const int curr_alpha = *curr_pixel >> 24;
      if (curr_alpha == 0) {
        *curr_pixel = prev->argb[j * prev->argb_stride + i];
      }
    }
  }

  // Frame rectangle now covers the whole canvas.
  rect->x_offset = 0;
  rect->y_offset = 0;
  rect->width = curr->width;
  rect->height = curr->height;
}

//------------------------------------------------------------------------------
// Encoded frame.

// Used to store two candidates of encoded data for an animation frame. One of
// the two will be chosen later.
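// Both candidates are kept until enough subsequent frames have been seen to
// decide (via KeyFramePenalty()) whether forcing a key frame at this position
// is worth the extra bytes.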
typedef struct {
  WebPMuxFrameInfo sub_frame;  // Encoded frame rectangle.
  WebPMuxFrameInfo key_frame;  // Encoded frame if it was converted to keyframe.
} EncodedFrame;

// Release the data contained by 'encoded_frame'.
static void FrameRelease(EncodedFrame* const encoded_frame) {
  WebPDataClear(&encoded_frame->sub_frame.bitstream);
  WebPDataClear(&encoded_frame->key_frame.bitstream);
  memset(encoded_frame, 0, sizeof(*encoded_frame));
}

//------------------------------------------------------------------------------
// Frame cache.

// Used to store encoded frames that haven't been output yet.
struct WebPFrameCache {
  EncodedFrame* encoded_frames;  // Array of encoded frames.
  size_t size;                   // Number of allocated data elements.
  size_t start;                  // Start index.
  size_t count;                  // Number of valid data elements.
  int flush_count;               // If >0, 'flush_count' frames starting from
                                 // 'start' are ready to be added to mux.
  int64_t best_delta;            // min(encoded key frame size - encoded
                                 // sub-frame size) over the cached frames.
                                 // Can be negative in certain cases due to
                                 // transparent pixels in a frame.
  int keyframe;                  // Index of selected key frame relative to
                                 // 'start'.

  size_t kmin;                   // Min distance between key frames.
  size_t kmax;                   // Max distance between key frames.
  size_t count_since_key_frame;  // Frames seen since the last key frame.
  int allow_mixed;               // If true, each frame can be lossy or
                                 // lossless.
  WebPPicture prev_canvas;       // Previous canvas (properly disposed).
  WebPPicture curr_canvas;       // Current canvas (temporary buffer).
  int is_first_frame;            // True if no frames have been added to the
                                 // cache since WebPFrameCacheNew().
};

// Reset the counters in the cache struct. Doesn't touch
// 'cache->encoded_frames' and 'cache->size'.
static void CacheReset(WebPFrameCache* const cache) {
  cache->start = 0;
  cache->count = 0;
  cache->flush_count = 0;
  cache->best_delta = DELTA_INFINITY;
  cache->keyframe = KEYFRAME_NONE;
}

WebPFrameCache* WebPFrameCacheNew(int width, int height,
                                  size_t kmin, size_t kmax, int allow_mixed) {
  WebPFrameCache* cache = (WebPFrameCache*)malloc(sizeof(*cache));
  if (cache == NULL) return NULL;
  CacheReset(cache);
  cache->is_first_frame = 1;

  // Picture buffers.
  if (!WebPPictureInit(&cache->prev_canvas) ||
      !WebPPictureInit(&cache->curr_canvas)) {
    return NULL;
  }
  cache->prev_canvas.width = width;
  cache->prev_canvas.height = height;
  cache->prev_canvas.use_argb = 1;
  if (!WebPPictureAlloc(&cache->prev_canvas) ||
      !WebPPictureCopy(&cache->prev_canvas, &cache->curr_canvas)) {
    goto Err;
  }
  WebPUtilClearPic(&cache->prev_canvas, NULL);

  // Cache data.
  cache->allow_mixed = allow_mixed;
  cache->kmin = kmin;
  cache->kmax = kmax;
  cache->count_since_key_frame = 0;
  assert(kmax > kmin);
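  // 'kmax - kmin' slots suffice because frames within 'kmin' of the last key
  // frame are marked flushable as soon as they are added, and a key frame is
  // forced (and everything flushed) once 'kmax' frames have passed since the
  // last one (see WebPFrameCacheAddFrame()).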
  cache->size = kmax - kmin;
  cache->encoded_frames =
      (EncodedFrame*)calloc(cache->size, sizeof(*cache->encoded_frames));
  if (cache->encoded_frames == NULL) goto Err;

  return cache;  // All OK.

 Err:
  WebPFrameCacheDelete(cache);
  return NULL;
}

void WebPFrameCacheDelete(WebPFrameCache* const cache) {
  if (cache != NULL) {
    size_t i;
    for (i = 0; i < cache->size; ++i) {
      FrameRelease(&cache->encoded_frames[i]);
    }
    free(cache->encoded_frames);
    WebPPictureFree(&cache->prev_canvas);
    WebPPictureFree(&cache->curr_canvas);
    free(cache);
  }
}

static int EncodeFrame(const WebPConfig* const config, WebPPicture* const pic,
                       WebPMemoryWriter* const memory) {
  pic->use_argb = 1;
  pic->writer = WebPMemoryWrite;
  pic->custom_ptr = memory;
  if (!WebPEncode(config, pic)) {
    return 0;
  }
  return 1;
}

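// Transfer ownership of the writer's buffer to 'encoded_data'. The buffer is
// released later through WebPDataClear() in FrameRelease().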
static void GetEncodedData(const WebPMemoryWriter* const memory,
                           WebPData* const encoded_data) {
  encoded_data->bytes = memory->mem;
  encoded_data->size = memory->size;
}

#define MIN_COLORS_LOSSY 31     // Don't try lossy below this threshold.
#define MAX_COLORS_LOSSLESS 194 // Don't try lossless above this threshold.
#define MAX_COLOR_COUNT 256     // Power of 2 greater than MAX_COLORS_LOSSLESS.
#define HASH_SIZE (MAX_COLOR_COUNT * 4)
#define HASH_RIGHT_SHIFT 22     // 32 - log2(HASH_SIZE).

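// The color counting below uses a small open-addressed hash table: each
// distinct ARGB value is hashed with a multiplicative hash and collisions are
// resolved by linear probing. HASH_SIZE is 4x MAX_COLOR_COUNT, so the table
// stays at most 25% full before the count is capped at MAX_COLOR_COUNT.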
// TODO(urvang): Also used in enc/vp8l.c. Move to utils.
// If the number of colors in the 'pic' is at least MAX_COLOR_COUNT, return
// MAX_COLOR_COUNT. Otherwise, return the exact number of colors in the 'pic'.
static int GetColorCount(const WebPPicture* const pic) {
  int x, y;
  int num_colors = 0;
  uint8_t in_use[HASH_SIZE] = { 0 };
  uint32_t colors[HASH_SIZE];
  static const uint32_t kHashMul = 0x1e35a7bd;
  const uint32_t* argb = pic->argb;
  const int width = pic->width;
  const int height = pic->height;
  uint32_t last_pix = ~argb[0];   // so we're sure that last_pix != argb[0]

  for (y = 0; y < height; ++y) {
    for (x = 0; x < width; ++x) {
      int key;
      if (argb[x] == last_pix) {
        continue;
      }
      last_pix = argb[x];
      key = (kHashMul * last_pix) >> HASH_RIGHT_SHIFT;
      while (1) {
        if (!in_use[key]) {
          colors[key] = last_pix;
          in_use[key] = 1;
          ++num_colors;
          if (num_colors >= MAX_COLOR_COUNT) {
            return MAX_COLOR_COUNT;  // Exact count not needed.
          }
          break;
        } else if (colors[key] == last_pix) {
          break;  // The color is already there.
        } else {
          // Some other color sits here, so do linear conflict resolution.
          ++key;
          key &= (HASH_SIZE - 1);  // Key mask.
        }
      }
    }
    argb += pic->argb_stride;
  }
  return num_colors;
}

#undef MAX_COLOR_COUNT
#undef HASH_SIZE
#undef HASH_RIGHT_SHIFT

static int SetFrame(const WebPConfig* const config, int allow_mixed,
                    int is_key_frame, const WebPPicture* const prev_canvas,
                    WebPPicture* const frame, const WebPFrameRect* const rect,
                    const WebPMuxFrameInfo* const info,
                    WebPPicture* const sub_frame, EncodedFrame* encoded_frame) {
  int try_lossless;
  int try_lossy;
  int try_both;
  WebPMemoryWriter mem1, mem2;
  WebPData* encoded_data;
  WebPMuxFrameInfo* const dst =
      is_key_frame ? &encoded_frame->key_frame : &encoded_frame->sub_frame;
  *dst = *info;
  encoded_data = &dst->bitstream;
  WebPMemoryWriterInit(&mem1);
  WebPMemoryWriterInit(&mem2);
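  // 'mem1' receives the lossless candidate and 'mem2' the lossy one; whichever
  // bitstream is not kept is freed before returning.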

  if (!allow_mixed) {
    try_lossless = config->lossless;
    try_lossy = !try_lossless;
  } else {  // Use a heuristic for trying lossless and/or lossy compression.
    const int num_colors = GetColorCount(sub_frame);
    try_lossless = (num_colors < MAX_COLORS_LOSSLESS);
    try_lossy = (num_colors >= MIN_COLORS_LOSSY);
  }
  try_both = try_lossless && try_lossy;

  if (try_lossless) {
    WebPConfig config_ll = *config;
    config_ll.lossless = 1;
    if (!EncodeFrame(&config_ll, sub_frame, &mem1)) {
      goto Err;
    }
  }

  if (try_lossy) {
    WebPConfig config_lossy = *config;
    config_lossy.lossless = 0;
    if (!is_key_frame) {
      // For lossy compression of a frame, it's better to replace transparent
      // pixels of 'curr' with actual RGB values, whenever possible.
      ReduceTransparency(prev_canvas, rect, frame);
      // TODO(later): Investigate if this helps lossless compression as well.
      FlattenSimilarBlocks(prev_canvas, rect, frame);
    }
    if (!EncodeFrame(&config_lossy, sub_frame, &mem2)) {
      goto Err;
    }
  }

  if (try_both) {  // Pick the encoding with the smallest size.
    // TODO(later): Perhaps a rough SSIM/PSNR produced by the encoder should
    // also be a criterion, in addition to sizes.
    if (mem1.size <= mem2.size) {
      free(mem2.mem);
      GetEncodedData(&mem1, encoded_data);
    } else {
      free(mem1.mem);
      GetEncodedData(&mem2, encoded_data);
    }
  } else {
    GetEncodedData(try_lossless ? &mem1 : &mem2, encoded_data);
  }
  return 1;

 Err:
  free(mem1.mem);
  free(mem2.mem);
  return 0;
}

#undef MIN_COLORS_LOSSY
#undef MAX_COLORS_LOSSLESS

// Returns cached frame at given 'position' index.
static EncodedFrame* CacheGetFrame(const WebPFrameCache* const cache,
                                   size_t position) {
  assert(cache->start + position < cache->size);
  return &cache->encoded_frames[cache->start + position];
}

// Calculate the penalty incurred if we encode given frame as a key frame
// instead of a sub-frame.
static int64_t KeyFramePenalty(const EncodedFrame* const encoded_frame) {
  return ((int64_t)encoded_frame->key_frame.bitstream.size -
          encoded_frame->sub_frame.bitstream.size);
}
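
// A negative penalty means the key frame variant is actually smaller than the
// sub-frame one; WebPFrameCacheAddFrame() selects the cached frame with the
// smallest penalty ('best_delta') as the next key frame.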

static void DisposeFrame(WebPMuxAnimDispose dispose_method,
                         const WebPFrameRect* const gif_rect,
                         WebPPicture* const frame, WebPPicture* const canvas) {
  if (dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
    WebPUtilClearPic(frame, NULL);
    WebPUtilClearPic(canvas, gif_rect);
  }
}

int WebPFrameCacheAddFrame(WebPFrameCache* const cache,
                           const WebPConfig* const config,
                           const WebPFrameRect* const orig_rect,
                           WebPPicture* const frame,
                           WebPMuxFrameInfo* const info) {
  int ok = 0;
  WebPFrameRect rect = *orig_rect;
  WebPPicture sub_image;  // View extracted from 'frame' with rectangle 'rect'.
  WebPPicture* const prev_canvas = &cache->prev_canvas;
  const size_t position = cache->count;
  const int allow_mixed = cache->allow_mixed;
  EncodedFrame* const encoded_frame = CacheGetFrame(cache, position);
  assert(position < cache->size);

  // Snap to even offsets (and adjust dimensions if needed).
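  // (The WebP container stores ANMF frame offsets divided by two, so odd
  // offsets cannot be represented; rounding an offset down and growing the
  // rectangle by one pixel keeps the same area covered.)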
  rect.width += (rect.x_offset & 1);
  rect.height += (rect.y_offset & 1);
  rect.x_offset &= ~1;
  rect.y_offset &= ~1;

  if (!WebPPictureView(frame, rect.x_offset, rect.y_offset,
                       rect.width, rect.height, &sub_image)) {
    return 0;
  }
  info->x_offset = rect.x_offset;
  info->y_offset = rect.y_offset;

  ++cache->count;

  if (cache->is_first_frame || IsKeyFrame(frame, &rect, prev_canvas)) {
    // Add this as a key frame.
    if (!SetFrame(config, allow_mixed, 1, NULL, NULL, NULL, info, &sub_image,
                  encoded_frame)) {
      goto End;
    }
    cache->keyframe = position;
    cache->flush_count = cache->count;
    cache->count_since_key_frame = 0;
    // Update prev_canvas by simply copying from 'curr'.
    CopyPixels(frame, prev_canvas);
  } else {
    ++cache->count_since_key_frame;
    if (cache->count_since_key_frame <= cache->kmin) {
      // Add this as a frame rectangle.
      if (!SetFrame(config, allow_mixed, 0, prev_canvas, frame, &rect, info,
                    &sub_image, encoded_frame)) {
        goto End;
      }
      cache->flush_count = cache->count;
      // Update prev_canvas by blending 'curr' into it.
      BlendPixels(frame, orig_rect, prev_canvas);
    } else {
      WebPPicture full_image;
      WebPMuxFrameInfo full_image_info;
      int frame_added;
      int64_t curr_delta;

      // Add frame rectangle to cache.
      if (!SetFrame(config, allow_mixed, 0, prev_canvas, frame, &rect, info,
                    &sub_image, encoded_frame)) {
        goto End;
      }

      // Convert to a key frame.
      CopyPixels(frame, &cache->curr_canvas);
      ConvertToKeyFrame(prev_canvas, &rect, &cache->curr_canvas);
      if (!WebPPictureView(&cache->curr_canvas, rect.x_offset, rect.y_offset,
                           rect.width, rect.height, &full_image)) {
        goto End;
      }
      full_image_info = *info;
      full_image_info.x_offset = rect.x_offset;
      full_image_info.y_offset = rect.y_offset;

      // Add key frame to cache, too.
      frame_added = SetFrame(config, allow_mixed, 1, NULL, NULL, NULL,
                             &full_image_info, &full_image, encoded_frame);
      WebPPictureFree(&full_image);
      if (!frame_added) goto End;

      // Analyze size difference of the two variants.
      curr_delta = KeyFramePenalty(encoded_frame);
      if (curr_delta <= cache->best_delta) {  // Pick this as keyframe.
        cache->keyframe = position;
        cache->best_delta = curr_delta;
        cache->flush_count = cache->count - 1;  // We can flush previous frames.
      }
      if (cache->count_since_key_frame == cache->kmax) {
        cache->flush_count = cache->count;
        cache->count_since_key_frame = 0;
      }

      // Update prev_canvas by simply copying from 'curr_canvas'.
      CopyPixels(&cache->curr_canvas, prev_canvas);
    }
  }

  DisposeFrame(info->dispose_method, orig_rect, frame, prev_canvas);

  cache->is_first_frame = 0;
  ok = 1;

 End:
  WebPPictureFree(&sub_image);
  if (!ok) {
    FrameRelease(encoded_frame);
    --cache->count;  // We reset the count, as the frame addition failed.
  }
  return ok;
}

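// Flush the first 'flush_count' cached frames to 'mux': if the frame at the
// front of the cache is the selected key frame, it is written as a full,
// non-blended frame; all other frames are written as blended sub-frames.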
WebPMuxError WebPFrameCacheFlush(WebPFrameCache* const cache, int verbose,
                                 WebPMux* const mux) {
  while (cache->flush_count > 0) {
    WebPMuxFrameInfo* info;
    WebPMuxError err;
    EncodedFrame* const curr = CacheGetFrame(cache, 0);
    // Pick frame or full canvas.
    if (cache->keyframe == 0) {
      info = &curr->key_frame;
      info->blend_method = WEBP_MUX_NO_BLEND;
      cache->keyframe = KEYFRAME_NONE;
      cache->best_delta = DELTA_INFINITY;
    } else {
      info = &curr->sub_frame;
      info->blend_method = WEBP_MUX_BLEND;
    }
    // Add to mux.
    err = WebPMuxPushFrame(mux, info, 1);
    if (err != WEBP_MUX_OK) return err;
    if (verbose) {
      printf("Added frame. offset:%d,%d duration:%d dispose:%d blend:%d\n",
             info->x_offset, info->y_offset, info->duration,
             info->dispose_method, info->blend_method);
    }
    FrameRelease(curr);
    ++cache->start;
    --cache->flush_count;
    --cache->count;
    if (cache->keyframe != KEYFRAME_NONE) --cache->keyframe;
  }

  if (cache->count == 0) CacheReset(cache);
  return WEBP_MUX_OK;
}

WebPMuxError WebPFrameCacheFlushAll(WebPFrameCache* const cache, int verbose,
                                    WebPMux* const mux) {
  cache->flush_count = cache->count;  // Force flushing of all frames.
  return WebPFrameCacheFlush(cache, verbose, mux);
}

//------------------------------------------------------------------------------