// Copyright 2013 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// Helper structs and methods for gif2webp tool.
//

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "webp/encode.h"
#include "./gif2webp_util.h"

#define DELTA_INFINITY (1ULL << 32)
#define KEYFRAME_NONE (-1)

//------------------------------------------------------------------------------
// Helper utilities.

static void ClearRectangle(WebPPicture* const picture,
                           int left, int top, int width, int height) {
  int j;
  for (j = top; j < top + height; ++j) {
    uint32_t* const dst = picture->argb + j * picture->argb_stride;
    int i;
    for (i = left; i < left + width; ++i) {
      dst[i] = WEBP_UTIL_TRANSPARENT_COLOR;
    }
  }
}

void WebPUtilClearPic(WebPPicture* const picture,
                      const WebPFrameRect* const rect) {
  if (rect != NULL) {
    ClearRectangle(picture, rect->x_offset, rect->y_offset,
                   rect->width, rect->height);
  } else {
    ClearRectangle(picture, 0, 0, picture->width, picture->height);
  }
}

// TODO: Also used in picture.c. Move to a common location?
// Copy width x height pixels from 'src' to 'dst' honoring the strides.
static void CopyPlane(const uint8_t* src, int src_stride,
                      uint8_t* dst, int dst_stride, int width, int height) {
  while (height-- > 0) {
    memcpy(dst, src, width);
    src += src_stride;
    dst += dst_stride;
  }
}

// Copy pixels from 'src' to 'dst' honoring strides. 'src' and 'dst' are assumed
// to be already allocated.
static void CopyPixels(const WebPPicture* const src, WebPPicture* const dst) {
  assert(src->width == dst->width && src->height == dst->height);
  CopyPlane((uint8_t*)src->argb, 4 * src->argb_stride, (uint8_t*)dst->argb,
            4 * dst->argb_stride, 4 * src->width, src->height);
}

// Given 'src' picture and its frame rectangle 'rect', blend it into 'dst'.
static void BlendPixels(const WebPPicture* const src,
                        const WebPFrameRect* const rect,
                        WebPPicture* const dst) {
  int j;
  assert(src->width == dst->width && src->height == dst->height);
  for (j = rect->y_offset; j < rect->y_offset + rect->height; ++j) {
    int i;
    for (i = rect->x_offset; i < rect->x_offset + rect->width; ++i) {
      const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
      const int src_alpha = src_pixel >> 24;
      if (src_alpha != 0) {
        dst->argb[j * dst->argb_stride + i] = src_pixel;
      }
    }
  }
}

// Replace transparent pixels within 'rect' of 'dst' with the corresponding
// opaque pixels from 'src'.
static void ReduceTransparency(const WebPPicture* const src,
                               const WebPFrameRect* const rect,
                               WebPPicture* const dst) {
  int i, j;
  assert(src != NULL && dst != NULL && rect != NULL);
  assert(src->width == dst->width && src->height == dst->height);
  for (j = rect->y_offset; j < rect->y_offset + rect->height; ++j) {
    for (i = rect->x_offset; i < rect->x_offset + rect->width; ++i) {
      const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
      const int src_alpha = src_pixel >> 24;
      const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
      const int dst_alpha = dst_pixel >> 24;
      if (dst_alpha == 0 && src_alpha == 0xff) {
        dst->argb[j * dst->argb_stride + i] = src_pixel;
      }
    }
  }
}

// Replace similar blocks of pixels by a 'see-through' transparent block
// with uniform average color.
static void FlattenSimilarBlocks(const WebPPicture* const src,
                                 const WebPFrameRect* const rect,
                                 WebPPicture* const dst) {
  int i, j;
  const int block_size = 8;
  // Visit only whole blocks that are aligned to the block grid and lie
  // entirely inside the frame rectangle.
  const int y_start = (rect->y_offset + block_size) & ~(block_size - 1);
  const int y_end = (rect->y_offset + rect->height) & ~(block_size - 1);
  const int x_start = (rect->x_offset + block_size) & ~(block_size - 1);
  const int x_end = (rect->x_offset + rect->width) & ~(block_size - 1);
  assert(src != NULL && dst != NULL && rect != NULL);
  assert(src->width == dst->width && src->height == dst->height);
  assert((block_size & (block_size - 1)) == 0);  // must be a power of 2
  // Iterate over each block and count similar pixels.
  for (j = y_start; j < y_end; j += block_size) {
    for (i = x_start; i < x_end; i += block_size) {
      int cnt = 0;
      int avg_r = 0, avg_g = 0, avg_b = 0;
      int x, y;
      const uint32_t* const psrc = src->argb + j * src->argb_stride + i;
      uint32_t* const pdst = dst->argb + j * dst->argb_stride + i;
      for (y = 0; y < block_size; ++y) {
        for (x = 0; x < block_size; ++x) {
          const uint32_t src_pixel = psrc[x + y * src->argb_stride];
          const int alpha = src_pixel >> 24;
          if (alpha == 0xff &&
              src_pixel == pdst[x + y * dst->argb_stride]) {
            ++cnt;
            avg_r += (src_pixel >> 16) & 0xff;
            avg_g += (src_pixel >> 8) & 0xff;
            avg_b += (src_pixel >> 0) & 0xff;
          }
        }
      }
      // If we have a fully similar block, we replace it with an
      // average transparent block. This compresses better in lossy mode.
      if (cnt == block_size * block_size) {
        const uint32_t color = (0x00 << 24) |
                               ((avg_r / cnt) << 16) |
                               ((avg_g / cnt) << 8) |
                               ((avg_b / cnt) << 0);
        for (y = 0; y < block_size; ++y) {
          for (x = 0; x < block_size; ++x) {
            pdst[x + y * dst->argb_stride] = color;
          }
        }
      }
    }
  }
}

//------------------------------------------------------------------------------
// Key frame related utilities.

// Returns true if 'curr' frame with frame rectangle 'curr_rect' is a key frame,
// that is, it can be decoded independently of 'prev' canvas.
static int IsKeyFrame(const WebPPicture* const curr,
                      const WebPFrameRect* const curr_rect,
                      const WebPPicture* const prev) {
  int i, j;
  int is_key_frame = 1;

  // If previous canvas (with previous frame disposed) is all transparent,
  // current frame is a key frame.
  for (j = 0; j < prev->height; ++j) {
    const uint32_t* const row = &prev->argb[j * prev->argb_stride];
    for (i = 0; i < prev->width; ++i) {
      if (row[i] & 0xff000000u) {  // not fully transparent?
        is_key_frame = 0;
        break;
      }
    }
    if (!is_key_frame) break;
  }
  if (is_key_frame) return 1;

  // If current frame covers the whole canvas and does not contain any
  // transparent pixels that depend on previous canvas, then current frame is
  // a key frame.
  if (curr_rect->width == curr->width && curr_rect->height == curr->height) {
    assert(curr_rect->x_offset == 0 && curr_rect->y_offset == 0);
    is_key_frame = 1;
    for (j = 0; j < prev->height; ++j) {
      for (i = 0; i < prev->width; ++i) {
        const uint32_t prev_alpha =
            (prev->argb[j * prev->argb_stride + i]) >> 24;
        const uint32_t curr_alpha =
            (curr->argb[j * curr->argb_stride + i]) >> 24;
        if (curr_alpha != 0xff && prev_alpha != 0) {
          is_key_frame = 0;
          break;
        }
      }
      if (!is_key_frame) break;
    }
    if (is_key_frame) return 1;
  }

  return 0;
}

// Given 'prev' frame and current frame rectangle 'rect', convert 'curr' frame
// to a key frame.
static void ConvertToKeyFrame(const WebPPicture* const prev,
                              WebPFrameRect* const rect,
                              WebPPicture* const curr) {
  int j;
  assert(curr->width == prev->width && curr->height == prev->height);

  // Replace transparent pixels of current canvas with those from previous
  // canvas (with previous frame disposed).
  for (j = 0; j < curr->height; ++j) {
    int i;
    for (i = 0; i < curr->width; ++i) {
      uint32_t* const curr_pixel = curr->argb + j * curr->argb_stride + i;
      const int curr_alpha = *curr_pixel >> 24;
      if (curr_alpha == 0) {
        *curr_pixel = prev->argb[j * prev->argb_stride + i];
      }
    }
  }

  // Frame rectangle now covers the whole canvas.
  rect->x_offset = 0;
  rect->y_offset = 0;
  rect->width = curr->width;
  rect->height = curr->height;
}

//------------------------------------------------------------------------------
// Encoded frame.

// Used to store two candidates of encoded data for an animation frame. One of
// the two will be chosen later.
typedef struct {
  WebPMuxFrameInfo sub_frame;  // Encoded frame rectangle.
  WebPMuxFrameInfo key_frame;  // Encoded frame if it was converted to keyframe.
} EncodedFrame;
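// Note: only one of the two bitstreams is eventually written to the output;
// the choice is made later, when the frame is flushed (see
// WebPFrameCacheFlush()), and the unused candidate is simply released.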

// Release the data contained by 'encoded_frame'.
static void FrameRelease(EncodedFrame* const encoded_frame) {
  if (encoded_frame != NULL) {
    WebPDataClear(&encoded_frame->sub_frame.bitstream);
    WebPDataClear(&encoded_frame->key_frame.bitstream);
    memset(encoded_frame, 0, sizeof(*encoded_frame));
  }
}

//------------------------------------------------------------------------------
// Frame cache.

// Used to store encoded frames that haven't been output yet.
struct WebPFrameCache {
  EncodedFrame* encoded_frames;  // Array of encoded frames.
  size_t size;                   // Number of allocated data elements.
  size_t start;                  // Start index.
  size_t count;                  // Number of valid data elements.
  int flush_count;               // If >0, 'flush_count' frames starting from
                                 // 'start' are ready to be added to mux.
  int64_t best_delta;            // min(encoded key frame size - encoded
                                 // sub-frame size) over the cached frames.
                                 // Can be negative in certain cases due to
                                 // transparent pixels in a frame.
  int keyframe;                  // Index of selected keyframe relative to
                                 // 'start'.

  size_t kmin;                   // Min distance between key frames.
  size_t kmax;                   // Max distance between key frames.
  size_t count_since_key_frame;  // Frames seen since the last key frame.
  int allow_mixed;               // If true, each frame can be lossy or
                                 // lossless.
  WebPPicture prev_canvas;       // Previous canvas (properly disposed).
  WebPPicture curr_canvas;       // Current canvas (temporary buffer).
  int is_first_frame;            // True if no frames have been added to the
                                 // cache since WebPFrameCacheNew().
};
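
// Rough usage sketch (hypothetical variable names, error checks omitted; how
// often WebPFrameCacheFlush() is called is up to the caller):
//
//   WebPFrameCache* const cache =
//       WebPFrameCacheNew(width, height, kmin, kmax, allow_mixed);
//   while (/* more input frames */) {
//     /* ...decode the next frame into 'frame', fill 'rect' and 'info'... */
//     WebPFrameCacheAddFrame(cache, &config, &rect, &frame, &info);
//     WebPFrameCacheFlush(cache, verbose, mux);   // Push ready frames to mux.
//   }
//   WebPFrameCacheFlushAll(cache, verbose, mux);  // Push whatever remains.
//   WebPFrameCacheDelete(cache);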

// Reset the counters in the cache struct. Doesn't touch 'cache->encoded_frames'
// and 'cache->size'.
static void CacheReset(WebPFrameCache* const cache) {
  cache->start = 0;
  cache->count = 0;
  cache->flush_count = 0;
  cache->best_delta = DELTA_INFINITY;
  cache->keyframe = KEYFRAME_NONE;
}

WebPFrameCache* WebPFrameCacheNew(int width, int height,
                                  size_t kmin, size_t kmax, int allow_mixed) {
  WebPFrameCache* cache = (WebPFrameCache*)malloc(sizeof(*cache));
  if (cache == NULL) return NULL;
  CacheReset(cache);
  // sanity init, so we can call WebPFrameCacheDelete():
  cache->encoded_frames = NULL;

  cache->is_first_frame = 1;

  // Picture buffers.
  if (!WebPPictureInit(&cache->prev_canvas) ||
      !WebPPictureInit(&cache->curr_canvas)) {
    return NULL;
  }
  cache->prev_canvas.width = width;
  cache->prev_canvas.height = height;
  cache->prev_canvas.use_argb = 1;
  if (!WebPPictureAlloc(&cache->prev_canvas) ||
      !WebPPictureCopy(&cache->prev_canvas, &cache->curr_canvas)) {
    goto Err;
  }
  WebPUtilClearPic(&cache->prev_canvas, NULL);

  // Cache data.
  cache->allow_mixed = allow_mixed;
  cache->kmin = kmin;
  cache->kmax = kmax;
  cache->count_since_key_frame = 0;
  assert(kmax > kmin);
  cache->size = kmax - kmin;
  cache->encoded_frames =
      (EncodedFrame*)calloc(cache->size, sizeof(*cache->encoded_frames));
  if (cache->encoded_frames == NULL) goto Err;

  return cache;  // All OK.

 Err:
  WebPFrameCacheDelete(cache);
  return NULL;
}

void WebPFrameCacheDelete(WebPFrameCache* const cache) {
  if (cache != NULL) {
    if (cache->encoded_frames != NULL) {
      size_t i;
      for (i = 0; i < cache->size; ++i) {
        FrameRelease(&cache->encoded_frames[i]);
      }
      free(cache->encoded_frames);
    }
    WebPPictureFree(&cache->prev_canvas);
    WebPPictureFree(&cache->curr_canvas);
    free(cache);
  }
}

static int EncodeFrame(const WebPConfig* const config, WebPPicture* const pic,
                       WebPMemoryWriter* const memory) {
  pic->use_argb = 1;
  pic->writer = WebPMemoryWrite;
  pic->custom_ptr = memory;
  if (!WebPEncode(config, pic)) {
    return 0;
  }
  return 1;
}

static void GetEncodedData(const WebPMemoryWriter* const memory,
                           WebPData* const encoded_data) {
  encoded_data->bytes = memory->mem;
  encoded_data->size = memory->size;
}

#define MIN_COLORS_LOSSY     31  // Don't try lossy below this threshold.
#define MAX_COLORS_LOSSLESS 194  // Don't try lossless above this threshold.
#define MAX_COLOR_COUNT     256  // Power of 2 greater than MAX_COLORS_LOSSLESS.
#define HASH_SIZE (MAX_COLOR_COUNT * 4)
#define HASH_RIGHT_SHIFT 22  // 32 - log2(HASH_SIZE).

// TODO(urvang): Also used in enc/vp8l.c. Move to utils.
// If the number of colors in the 'pic' is at least MAX_COLOR_COUNT, return
// MAX_COLOR_COUNT. Otherwise, return the exact number of colors in the 'pic'.
static int GetColorCount(const WebPPicture* const pic) {
  int x, y;
  int num_colors = 0;
  uint8_t in_use[HASH_SIZE] = { 0 };
  uint32_t colors[HASH_SIZE];
  static const uint32_t kHashMul = 0x1e35a7bd;
  const uint32_t* argb = pic->argb;
  const int width = pic->width;
  const int height = pic->height;
  uint32_t last_pix = ~argb[0];  // so we're sure that last_pix != argb[0]

  for (y = 0; y < height; ++y) {
    for (x = 0; x < width; ++x) {
      int key;
      if (argb[x] == last_pix) {
        continue;
      }
      last_pix = argb[x];
      key = (kHashMul * last_pix) >> HASH_RIGHT_SHIFT;
      while (1) {
        if (!in_use[key]) {
          colors[key] = last_pix;
          in_use[key] = 1;
          ++num_colors;
          if (num_colors >= MAX_COLOR_COUNT) {
            return MAX_COLOR_COUNT;  // Exact count not needed.
          }
          break;
        } else if (colors[key] == last_pix) {
          break;  // The color is already there.
        } else {
          // Some other color sits here, so do linear conflict resolution.
          ++key;
          key &= (HASH_SIZE - 1);  // Key mask.
        }
      }
    }
    argb += pic->argb_stride;
  }
  return num_colors;
}

#undef MAX_COLOR_COUNT
#undef HASH_SIZE
#undef HASH_RIGHT_SHIFT

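// Encodes 'sub_frame' as lossless, lossy or both (depending on 'allow_mixed'
// and a color-count heuristic), keeps the smaller bitstream when both are
// tried, and stores the result in 'encoded_frame->key_frame' if 'is_key_frame'
// is true, or in 'encoded_frame->sub_frame' otherwise. For the lossy attempt
// on a non-key frame, 'frame' is first pre-processed against 'prev_canvas'
// (see ReduceTransparency() and FlattenSimilarBlocks()).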
static WebPEncodingError SetFrame(const WebPConfig* const config,
                                  int allow_mixed, int is_key_frame,
                                  const WebPPicture* const prev_canvas,
                                  WebPPicture* const frame,
                                  const WebPFrameRect* const rect,
                                  const WebPMuxFrameInfo* const info,
                                  WebPPicture* const sub_frame,
                                  EncodedFrame* encoded_frame) {
  WebPEncodingError error_code = VP8_ENC_OK;
  int try_lossless;
  int try_lossy;
  int try_both;
  WebPMemoryWriter mem1, mem2;
  WebPData* encoded_data;
  WebPMuxFrameInfo* const dst =
      is_key_frame ? &encoded_frame->key_frame : &encoded_frame->sub_frame;
  *dst = *info;
  encoded_data = &dst->bitstream;
  WebPMemoryWriterInit(&mem1);
  WebPMemoryWriterInit(&mem2);

  if (!allow_mixed) {
    try_lossless = config->lossless;
    try_lossy = !try_lossless;
  } else {  // Use a heuristic for trying lossless and/or lossy compression.
    const int num_colors = GetColorCount(sub_frame);
    try_lossless = (num_colors < MAX_COLORS_LOSSLESS);
    try_lossy = (num_colors >= MIN_COLORS_LOSSY);
  }
  try_both = try_lossless && try_lossy;

  if (try_lossless) {
    WebPConfig config_ll = *config;
    config_ll.lossless = 1;
    if (!EncodeFrame(&config_ll, sub_frame, &mem1)) {
      error_code = sub_frame->error_code;
      goto Err;
    }
  }

  if (try_lossy) {
    WebPConfig config_lossy = *config;
    config_lossy.lossless = 0;
    if (!is_key_frame) {
      // For lossy compression of a frame, it's better to replace transparent
      // pixels of 'frame' with actual RGB values, whenever possible.
      ReduceTransparency(prev_canvas, rect, frame);
      // TODO(later): Investigate if this helps lossless compression as well.
      FlattenSimilarBlocks(prev_canvas, rect, frame);
    }
    if (!EncodeFrame(&config_lossy, sub_frame, &mem2)) {
      error_code = sub_frame->error_code;
      goto Err;
    }
  }

  if (try_both) {  // Pick the encoding with smallest size.
    // TODO(later): Perhaps a rough SSIM/PSNR produced by the encoder should
    // also be a criterion, in addition to sizes.
    if (mem1.size <= mem2.size) {
#if WEBP_ENCODER_ABI_VERSION > 0x0203
      WebPMemoryWriterClear(&mem2);
#else
      free(mem2.mem);
      memset(&mem2, 0, sizeof(mem2));
#endif
      GetEncodedData(&mem1, encoded_data);
    } else {
#if WEBP_ENCODER_ABI_VERSION > 0x0203
      WebPMemoryWriterClear(&mem1);
#else
      free(mem1.mem);
      memset(&mem1, 0, sizeof(mem1));
#endif
      GetEncodedData(&mem2, encoded_data);
    }
  } else {
    GetEncodedData(try_lossless ? &mem1 : &mem2, encoded_data);
  }
  return error_code;

 Err:
#if WEBP_ENCODER_ABI_VERSION > 0x0203
  WebPMemoryWriterClear(&mem1);
  WebPMemoryWriterClear(&mem2);
#else
  free(mem1.mem);
  free(mem2.mem);
#endif
  return error_code;
}

#undef MIN_COLORS_LOSSY
#undef MAX_COLORS_LOSSLESS

// Returns cached frame at given 'position' index.
static EncodedFrame* CacheGetFrame(const WebPFrameCache* const cache,
                                   size_t position) {
  assert(cache->start + position < cache->size);
  return &cache->encoded_frames[cache->start + position];
}

// Calculate the penalty incurred if we encode given frame as a key frame
// instead of a sub-frame.
static int64_t KeyFramePenalty(const EncodedFrame* const encoded_frame) {
  return ((int64_t)encoded_frame->key_frame.bitstream.size -
          encoded_frame->sub_frame.bitstream.size);
}

static void DisposeFrame(WebPMuxAnimDispose dispose_method,
                         const WebPFrameRect* const gif_rect,
                         WebPPicture* const frame, WebPPicture* const canvas) {
  if (dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
    WebPUtilClearPic(frame, NULL);
    WebPUtilClearPic(canvas, gif_rect);
  }
}

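// Adds the given frame to the cache. The first frame, and any frame that is
// independently decodable, is stored as a key frame. Within 'kmin' frames of
// the last key frame only the sub-frame variant is encoded; beyond that, both
// a sub-frame and a key-frame variant are encoded and the candidate with the
// smallest key-frame penalty seen so far is remembered, so that a key frame is
// selected at the latest once 'kmax' frames have accumulated.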
int WebPFrameCacheAddFrame(WebPFrameCache* const cache,
                           const WebPConfig* const config,
                           const WebPFrameRect* const orig_rect_ptr,
                           WebPPicture* const frame,
                           WebPMuxFrameInfo* const info) {
  int ok = 0;
  WebPEncodingError error_code = VP8_ENC_OK;
  WebPFrameRect rect;
  WebPPicture sub_image;  // View extracted from 'frame' with rectangle 'rect'.
  WebPPicture* const prev_canvas = &cache->prev_canvas;
  const size_t position = cache->count;
  const int allow_mixed = cache->allow_mixed;
  EncodedFrame* const encoded_frame = CacheGetFrame(cache, position);
  WebPFrameRect orig_rect;
  assert(position < cache->size);

  if (frame == NULL || info == NULL) {
    return 0;
  }

  if (orig_rect_ptr == NULL) {
    orig_rect.width = frame->width;
    orig_rect.height = frame->height;
    orig_rect.x_offset = 0;
    orig_rect.y_offset = 0;
  } else {
    orig_rect = *orig_rect_ptr;
  }

  // Snap to even offsets (and adjust dimensions if needed).
  rect = orig_rect;
  rect.width += (rect.x_offset & 1);
  rect.height += (rect.y_offset & 1);
  rect.x_offset &= ~1;
  rect.y_offset &= ~1;

  if (!WebPPictureView(frame, rect.x_offset, rect.y_offset,
                       rect.width, rect.height, &sub_image)) {
    return 0;
  }
  info->x_offset = rect.x_offset;
  info->y_offset = rect.y_offset;

  ++cache->count;

  if (cache->is_first_frame || IsKeyFrame(frame, &rect, prev_canvas)) {
    // Add this as a key frame.
    error_code = SetFrame(config, allow_mixed, 1, NULL, NULL, NULL,
                          info, &sub_image, encoded_frame);
    if (error_code != VP8_ENC_OK) {
      goto End;
    }
    cache->keyframe = position;
    cache->flush_count = cache->count;
    cache->count_since_key_frame = 0;
    // Update prev_canvas by simply copying from 'frame'.
    CopyPixels(frame, prev_canvas);
  } else {
    ++cache->count_since_key_frame;
    if (cache->count_since_key_frame <= cache->kmin) {
      // Add this as a frame rectangle.
      error_code = SetFrame(config, allow_mixed, 0, prev_canvas, frame,
                            &rect, info, &sub_image, encoded_frame);
      if (error_code != VP8_ENC_OK) {
        goto End;
      }
      cache->flush_count = cache->count;
      // Update prev_canvas by blending 'frame' into it.
      BlendPixels(frame, &orig_rect, prev_canvas);
    } else {
      WebPPicture full_image;
      WebPMuxFrameInfo full_image_info;
      int64_t curr_delta;

      // Add frame rectangle to cache.
      error_code = SetFrame(config, allow_mixed, 0, prev_canvas, frame, &rect,
                            info, &sub_image, encoded_frame);
      if (error_code != VP8_ENC_OK) {
        goto End;
      }

      // Convert to a key frame.
      CopyPixels(frame, &cache->curr_canvas);
      ConvertToKeyFrame(prev_canvas, &rect, &cache->curr_canvas);
      if (!WebPPictureView(&cache->curr_canvas, rect.x_offset, rect.y_offset,
                           rect.width, rect.height, &full_image)) {
        goto End;
      }
      full_image_info = *info;
      full_image_info.x_offset = rect.x_offset;
      full_image_info.y_offset = rect.y_offset;

      // Add key frame to cache, too.
      error_code = SetFrame(config, allow_mixed, 1, NULL, NULL, NULL,
                            &full_image_info, &full_image, encoded_frame);
      WebPPictureFree(&full_image);
      if (error_code != VP8_ENC_OK) goto End;

      // Analyze size difference of the two variants.
      curr_delta = KeyFramePenalty(encoded_frame);
      if (curr_delta <= cache->best_delta) {  // Pick this as keyframe.
        cache->keyframe = position;
        cache->best_delta = curr_delta;
        cache->flush_count = cache->count - 1;  // We can flush previous frames.
      }
      if (cache->count_since_key_frame == cache->kmax) {
        cache->flush_count = cache->count;
        cache->count_since_key_frame = 0;
      }

      // Update prev_canvas by simply copying from 'curr_canvas'.
      CopyPixels(&cache->curr_canvas, prev_canvas);
    }
  }

  DisposeFrame(info->dispose_method, &orig_rect, frame, prev_canvas);

  cache->is_first_frame = 0;
  ok = 1;

 End:
  WebPPictureFree(&sub_image);
  if (!ok) {
    FrameRelease(encoded_frame);
    --cache->count;  // Roll back the count, as the frame addition failed.
  }
  frame->error_code = error_code;  // Report the error code, if any.
  assert(ok || error_code != VP8_ENC_OK);
  return ok;
}

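// Pushes the first 'flush_count' cached frames to the mux: the frame selected
// as key frame gets its key-frame bitstream with WEBP_MUX_NO_BLEND, every
// other frame gets its sub-frame bitstream with WEBP_MUX_BLEND. Flushed frames
// are released and removed from the cache.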
WebPMuxError WebPFrameCacheFlush(WebPFrameCache* const cache, int verbose,
                                 WebPMux* const mux) {
  while (cache->flush_count > 0) {
    WebPMuxFrameInfo* info;
    WebPMuxError err;
    EncodedFrame* const curr = CacheGetFrame(cache, 0);
    // Pick frame or full canvas.
    if (cache->keyframe == 0) {
      info = &curr->key_frame;
      info->blend_method = WEBP_MUX_NO_BLEND;
      cache->keyframe = KEYFRAME_NONE;
      cache->best_delta = DELTA_INFINITY;
    } else {
      info = &curr->sub_frame;
      info->blend_method = WEBP_MUX_BLEND;
    }
    // Add to mux.
    err = WebPMuxPushFrame(mux, info, 1);
    if (err != WEBP_MUX_OK) return err;
    if (verbose) {
      printf("Added frame. offset:%d,%d duration:%d dispose:%d blend:%d\n",
             info->x_offset, info->y_offset, info->duration,
             info->dispose_method, info->blend_method);
    }
    FrameRelease(curr);
    ++cache->start;
    --cache->flush_count;
    --cache->count;
    if (cache->keyframe != KEYFRAME_NONE) --cache->keyframe;
  }

  if (cache->count == 0) CacheReset(cache);
  return WEBP_MUX_OK;
}

WebPMuxError WebPFrameCacheFlushAll(WebPFrameCache* const cache, int verbose,
                                    WebPMux* const mux) {
  cache->flush_count = cache->count;  // Force flushing of all frames.
  return WebPFrameCacheFlush(cache, verbose, mux);
}

//------------------------------------------------------------------------------