Urvang Joshi | acd7b5a | 2015-05-01 16:11:49 -0700 | [diff] [blame] | 1 | // Copyright 2015 Google Inc. All Rights Reserved. |
| 2 | // |
| 3 | // Use of this source code is governed by a BSD-style license |
| 4 | // that can be found in the COPYING file in the root of the source |
| 5 | // tree. An additional intellectual property rights grant can be found |
| 6 | // in the file PATENTS. All contributing project authors may |
| 7 | // be found in the AUTHORS file in the root of the source tree. |
| 8 | // ----------------------------------------------------------------------------- |
| 9 | // |
| 10 | // Utilities for animated images |
| 11 | |
| 12 | #include "./anim_util.h" |
| 13 | |
| 14 | #include <assert.h> |
| 15 | #include <math.h> |
| 16 | #include <stdio.h> |
| 17 | #include <string.h> |
| 18 | |
| 19 | #include <fstream> |
| 20 | #include <sstream> // for 'ostringstream'. |
| 21 | |
| 22 | #ifdef WEBP_HAVE_GIF |
| 23 | #include <gif_lib.h> |
| 24 | #endif |
| 25 | #include "webp/format_constants.h" |
| 26 | #include "webp/decode.h" |
| 27 | #include "webp/demux.h" |
| 28 | |
| 29 | using std::ifstream; |
| 30 | using std::ios; |
| 31 | using std::ofstream; |
| 32 | using std::ostringstream; |
| 33 | |
static const int kNumChannels = 4;  // Bytes per pixel: R, G, B, A.
| 35 | |
| 36 | // ----------------------------------------------------------------------------- |
| 37 | // Common utilities. |
| 38 | |
| 39 | // Returns true if the frame covers the full canvas. |
// Returns true if the frame covers the full canvas.
static bool IsFullFrame(int width, int height,
                        int canvas_width, int canvas_height) {
  if (width != canvas_width) return false;
  return height == canvas_height;
}
| 44 | |
| 45 | static void AllocateFrames(AnimatedImage* const image, uint32_t frame_count) { |
| 46 | image->frames.resize(frame_count); |
| 47 | for (size_t i = 0; i < image->frames.size(); ++i) { |
| 48 | const size_t rgba_size = |
| 49 | image->canvas_width * kNumChannels * image->canvas_height; |
| 50 | image->frames[i].rgba.resize(rgba_size); |
| 51 | } |
| 52 | } |
| 53 | |
// Resets every byte of the RGBA canvas to zero (i.e. fully transparent black).
static void ZeroFillCanvas(uint8_t* rgba,
                           uint32_t canvas_width, uint32_t canvas_height) {
  const uint32_t total_bytes = canvas_width * 4u * canvas_height;  // 4 = RGBA.
  memset(rgba, 0, total_bytes);
}
| 59 | |
// Clears the given rectangle of the canvas (row stride 'rgba_stride' bytes)
// to fully transparent.
static void ZeroFillFrameRect(uint8_t* rgba, int rgba_stride, int x_offset,
                              int y_offset, int width, int height) {
  const int row_bytes = width * 4;  // 4 bytes per RGBA pixel.
  assert(row_bytes <= rgba_stride);
  uint8_t* row = rgba + y_offset * rgba_stride + x_offset * 4;
  for (int y = 0; y < height; ++y, row += rgba_stride) {
    memset(row, 0, row_bytes);
  }
}
| 70 | |
// Copy width * height pixels from 'src' to 'dst'.
static void CopyCanvas(const uint8_t* src, uint8_t* dst,
                       uint32_t width, uint32_t height) {
  assert(src != NULL && dst != NULL);
  const uint32_t num_bytes = width * 4u * height;  // 4 RGBA bytes per pixel.
  memcpy(dst, src, num_bytes);
}
| 77 | |
// Copy pixels in the given rectangle from 'src' to 'dst' honoring the 'stride'.
static void CopyFrameRectangle(const uint8_t* src, uint8_t* dst, int stride,
                               int x_offset, int y_offset,
                               int width, int height) {
  const int row_bytes = width * 4;  // 4 = RGBA channels per pixel.
  assert(row_bytes <= stride);
  // Both buffers share the same layout, so a single start offset serves both.
  const size_t start = y_offset * stride + x_offset * 4;
  const uint8_t* src_row = src + start;
  uint8_t* dst_row = dst + start;
  for (int y = 0; y < height; ++y) {
    memcpy(dst_row, src_row, row_bytes);
    src_row += stride;
    dst_row += stride;
  }
}
| 93 | |
// Canonicalize all transparent pixels to transparent black to aid comparison.
static void CleanupTransparentPixels(uint32_t* rgba,
                                     uint32_t width, uint32_t height) {
  const uint32_t num_pixels = width * height;
  for (uint32_t i = 0; i < num_pixels; ++i) {
    // Alpha lives in the top byte; zero alpha forces the whole pixel to 0.
    if ((rgba[i] >> 24) == 0) rgba[i] = 0;
  }
}
| 106 | |
// Dump frame to a PAM file named '<dump_folder>/<basename>_frame_<n>.pam'.
// 'rgba' must hold canvas_width * canvas_height RGBA pixels.
// Returns true on success.
static bool DumpFrame(const char filename[], const char dump_folder[],
                      uint32_t frame_num, const uint8_t rgba[],
                      int canvas_width, int canvas_height) {
  // Strip any directory component of 'filename' (either separator style).
  const std::string filename_str = filename;
  const size_t slash_idx = filename_str.find_last_of("/\\");
  const std::string base_name = (slash_idx != std::string::npos)
                                    ? filename_str.substr(slash_idx + 1)
                                    : filename_str;
  std::ostringstream dump_file;
  dump_file << dump_folder << "/" << base_name << "_frame_" << frame_num
            << ".pam";

  std::ofstream fout(dump_file.str().c_str(),
                     std::ios::binary | std::ios::out);
  if (!fout.good()) {
    fprintf(stderr, "Error opening file for writing: %s\n",
            dump_file.str().c_str());
    return false;
  }

  // PAM header (P7), followed by raw RGBA rows.
  fout << "P7\nWIDTH " << canvas_width << "\nHEIGHT " << canvas_height
       << "\nDEPTH 4\nMAXVAL 255\nTUPLTYPE RGB_ALPHA\nENDHDR\n";
  const int row_bytes = canvas_width * 4;  // 4 bytes per RGBA pixel.
  for (int y = 0; y < canvas_height; ++y) {
    fout.write(reinterpret_cast<const char*>(rgba) + y * row_bytes, row_bytes);
    if (!fout.good()) {
      fprintf(stderr, "Error writing to file: %s\n", dump_file.str().c_str());
      return false;  // Fix: was 'return 0;' in a bool-returning function.
    }
  }
  fout.close();
  return true;
}
| 142 | |
| 143 | // ----------------------------------------------------------------------------- |
| 144 | // WebP Decoding. |
| 145 | |
| 146 | // Returns true if this is a valid WebP bitstream. |
| 147 | static bool IsWebP(const std::string& file_str) { |
| 148 | return WebPGetInfo(reinterpret_cast<const uint8_t*>(file_str.c_str()), |
| 149 | file_str.length(), NULL, NULL) != 0; |
| 150 | } |
| 151 | |
| 152 | // Returns true if the current frame is a key-frame. |
| 153 | static bool IsKeyFrameWebP(const WebPIterator& curr, const WebPIterator& prev, |
| 154 | const DecodedFrame* const prev_frame, |
| 155 | int canvas_width, int canvas_height) { |
| 156 | if (prev_frame == NULL) { |
| 157 | return true; |
| 158 | } else if ((!curr.has_alpha || curr.blend_method == WEBP_MUX_NO_BLEND) && |
| 159 | IsFullFrame(curr.width, curr.height, |
| 160 | canvas_width, canvas_height)) { |
| 161 | return true; |
| 162 | } else { |
| 163 | return (prev.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) && |
| 164 | (IsFullFrame(prev.width, prev.height, canvas_width, canvas_height) || |
| 165 | prev_frame->is_key_frame); |
| 166 | } |
| 167 | } |
| 168 | |
// Blend a single channel of 'src' over 'dst', given their alpha channel values.
static uint8_t BlendChannelWebP(uint32_t src, uint8_t src_a, uint32_t dst,
                                uint8_t dst_a, uint32_t scale, int shift) {
  const uint32_t src_channel = (src >> shift) & 0xff;
  const uint32_t dst_channel = (dst >> shift) & 0xff;
  // Alpha-weighted sum; still scaled up by the combined alpha times 255.
  const uint32_t blend_unscaled = src_channel * src_a + dst_channel * dst_a;
  assert(blend_unscaled < (1ULL << 32) / scale);
  // 'scale' is (1 << 24) / blend_alpha, so this divides the weighting out.
  return (blend_unscaled * scale) >> 24;
}
| 178 | |
| 179 | // Blend 'src' over 'dst' assuming they are NOT pre-multiplied by alpha. |
| 180 | static uint32_t BlendPixelWebP(uint32_t src, uint32_t dst) { |
| 181 | const uint8_t src_a = (src >> 24) & 0xff; |
| 182 | |
| 183 | if (src_a == 0) { |
| 184 | return dst; |
| 185 | } else { |
| 186 | const uint8_t dst_a = (dst >> 24) & 0xff; |
| 187 | // This is the approximate integer arithmetic for the actual formula: |
| 188 | // dst_factor_a = (dst_a * (255 - src_a)) / 255. |
| 189 | const uint8_t dst_factor_a = (dst_a * (256 - src_a)) >> 8; |
| 190 | assert(src_a + dst_factor_a < 256); |
| 191 | const uint8_t blend_a = src_a + dst_factor_a; |
| 192 | const uint32_t scale = (1UL << 24) / blend_a; |
| 193 | |
| 194 | const uint8_t blend_r = |
| 195 | BlendChannelWebP(src, src_a, dst, dst_factor_a, scale, 0); |
| 196 | const uint8_t blend_g = |
| 197 | BlendChannelWebP(src, src_a, dst, dst_factor_a, scale, 8); |
| 198 | const uint8_t blend_b = |
| 199 | BlendChannelWebP(src, src_a, dst, dst_factor_a, scale, 16); |
| 200 | |
| 201 | return (blend_r << 0) | (blend_g << 8) | (blend_b << 16) | (blend_a << 24); |
| 202 | } |
| 203 | } |
| 204 | |
| 205 | // Returns two ranges (<left, width> pairs) at row 'canvas_y', that belong to |
| 206 | // 'src' but not 'dst'. A point range is empty if the corresponding width is 0. |
| 207 | static void FindBlendRangeAtRowWebP(const WebPIterator* const src, |
| 208 | const WebPIterator* const dst, int canvas_y, |
| 209 | int* const left1, int* const width1, |
| 210 | int* const left2, int* const width2) { |
| 211 | const int src_max_x = src->x_offset + src->width; |
| 212 | const int dst_max_x = dst->x_offset + dst->width; |
| 213 | const int dst_max_y = dst->y_offset + dst->height; |
| 214 | assert(canvas_y >= src->y_offset && canvas_y < (src->y_offset + src->height)); |
| 215 | *left1 = -1; |
| 216 | *width1 = 0; |
| 217 | *left2 = -1; |
| 218 | *width2 = 0; |
| 219 | |
| 220 | if (canvas_y < dst->y_offset || canvas_y >= dst_max_y || |
| 221 | src->x_offset >= dst_max_x || src_max_x <= dst->x_offset) { |
| 222 | *left1 = src->x_offset; |
| 223 | *width1 = src->width; |
| 224 | return; |
| 225 | } |
| 226 | |
| 227 | if (src->x_offset < dst->x_offset) { |
| 228 | *left1 = src->x_offset; |
| 229 | *width1 = dst->x_offset - src->x_offset; |
| 230 | } |
| 231 | |
| 232 | if (src_max_x > dst_max_x) { |
| 233 | *left2 = dst_max_x; |
| 234 | *width2 = src_max_x - dst_max_x; |
| 235 | } |
| 236 | } |
| 237 | |
| 238 | // Blend 'num_pixels' in 'src' over 'dst'. |
| 239 | static void BlendPixelRowWebP(uint32_t* const src, const uint32_t* const dst, |
| 240 | int num_pixels) { |
| 241 | for (int i = 0; i < num_pixels; ++i) { |
| 242 | uint32_t* const src_pixel_ptr = &src[i]; |
| 243 | const uint8_t src_alpha = (*src_pixel_ptr >> 24) & 0xff; |
| 244 | if (src_alpha != 0xff) { |
| 245 | const uint32_t dst_pixel = dst[i]; |
| 246 | *src_pixel_ptr = BlendPixelWebP(*src_pixel_ptr, dst_pixel); |
| 247 | } |
| 248 | } |
| 249 | } |
| 250 | |
| 251 | // Read animated WebP bitstream 'file_str' into 'AnimatedImage' struct. |
| 252 | static bool ReadAnimatedWebP(const char filename[], const std::string& file_str, |
| 253 | AnimatedImage* const image, bool dump_frames, |
| 254 | const char dump_folder[]) { |
| 255 | bool ok = true; |
| 256 | const WebPData webp_data = { |
| 257 | reinterpret_cast<const uint8_t*>(file_str.data()), file_str.size() |
| 258 | }; |
| 259 | WebPDemuxer* const demux = WebPDemux(&webp_data); |
| 260 | if (demux == NULL) return false; |
| 261 | |
| 262 | // Animation properties. |
| 263 | image->canvas_width = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH); |
| 264 | image->canvas_height = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT); |
| 265 | image->loop_count = WebPDemuxGetI(demux, WEBP_FF_LOOP_COUNT); |
| 266 | image->bgcolor = WebPDemuxGetI(demux, WEBP_FF_BACKGROUND_COLOR); |
| 267 | |
| 268 | const uint32_t frame_count = WebPDemuxGetI(demux, WEBP_FF_FRAME_COUNT); |
| 269 | const uint32_t canvas_width = image->canvas_width; |
| 270 | const uint32_t canvas_height = image->canvas_height; |
| 271 | |
| 272 | // Allocate frames. |
| 273 | AllocateFrames(image, frame_count); |
| 274 | |
| 275 | // Decode and reconstruct frames. |
| 276 | WebPIterator prev_iter = WebPIterator(); |
| 277 | WebPIterator curr_iter = WebPIterator(); |
| 278 | |
| 279 | for (uint32_t i = 0; i < frame_count; ++i) { |
| 280 | prev_iter = curr_iter; |
| 281 | |
| 282 | // Get frame. |
| 283 | if (!WebPDemuxGetFrame(demux, i + 1, &curr_iter)) { |
| 284 | fprintf(stderr, "Error retrieving frame #%u\n", i); |
| 285 | return false; |
| 286 | } |
| 287 | |
| 288 | DecodedFrame* const prev_frame = (i > 0) ? &image->frames[i - 1] : NULL; |
| 289 | uint8_t* const prev_rgba = |
| 290 | (prev_frame != NULL) ? prev_frame->rgba.data() : NULL; |
| 291 | DecodedFrame* const curr_frame = &image->frames[i]; |
| 292 | uint8_t* const curr_rgba = curr_frame->rgba.data(); |
| 293 | |
| 294 | curr_frame->duration = curr_iter.duration; |
| 295 | curr_frame->is_key_frame = IsKeyFrameWebP(curr_iter, prev_iter, prev_frame, |
| 296 | canvas_width, canvas_height); |
| 297 | |
| 298 | // TODO(urvang): The logic of decoding and reconstructing the next animated |
| 299 | // frame given the previous one should be a single library call (ideally a |
| 300 | // user-facing API), which takes care of frame disposal, blending etc. |
| 301 | |
| 302 | // Initialize. |
| 303 | if (curr_frame->is_key_frame) { |
| 304 | ZeroFillCanvas(curr_rgba, canvas_width, canvas_height); |
| 305 | } else { |
| 306 | CopyCanvas(prev_rgba, curr_rgba, canvas_width, canvas_height); |
| 307 | if (prev_iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) { |
| 308 | ZeroFillFrameRect(curr_rgba, canvas_width * kNumChannels, |
| 309 | prev_iter.x_offset, prev_iter.y_offset, |
| 310 | prev_iter.width, prev_iter.height); |
| 311 | } |
| 312 | } |
| 313 | |
| 314 | // Decode. |
| 315 | const uint8_t* input = curr_iter.fragment.bytes; |
| 316 | const size_t input_size = curr_iter.fragment.size; |
| 317 | const size_t output_offset = |
| 318 | (curr_iter.y_offset * canvas_width + curr_iter.x_offset) * kNumChannels; |
| 319 | uint8_t* output = curr_rgba + output_offset; |
| 320 | const int output_stride = kNumChannels * canvas_width; |
| 321 | const size_t output_size = output_stride * curr_iter.height; |
| 322 | |
| 323 | if (WebPDecodeRGBAInto(input, input_size, output, output_size, |
| 324 | output_stride) == NULL) { |
| 325 | ok = false; |
| 326 | break; |
| 327 | } |
| 328 | |
| 329 | // During the decoding of current frame, we may have set some pixels to be |
| 330 | // transparent (i.e. alpha < 255). However, the value of each of these |
| 331 | // pixels should have been determined by blending it against the value of |
| 332 | // that pixel in the previous frame if blending method of is WEBP_MUX_BLEND. |
| 333 | if (i > 0 && curr_iter.blend_method == WEBP_MUX_BLEND && |
| 334 | !curr_frame->is_key_frame) { |
| 335 | if (prev_iter.dispose_method == WEBP_MUX_DISPOSE_NONE) { |
| 336 | // Blend transparent pixels with pixels in previous canvas. |
| 337 | for (int y = 0; y < curr_iter.height; ++y) { |
| 338 | const size_t offset = |
| 339 | (curr_iter.y_offset + y) * canvas_width + curr_iter.x_offset; |
| 340 | BlendPixelRowWebP(reinterpret_cast<uint32_t*>(curr_rgba) + offset, |
| 341 | reinterpret_cast<uint32_t*>(prev_rgba) + offset, |
| 342 | curr_iter.width); |
| 343 | } |
| 344 | } else { |
| 345 | assert(prev_iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND); |
| 346 | // We need to blend a transparent pixel with its value just after |
| 347 | // initialization. That is, blend it with: |
| 348 | // * Fully transparent pixel if it belongs to prevRect <-- No-op. |
| 349 | // * The pixel in the previous canvas otherwise <-- Need alpha-blending. |
| 350 | for (int y = 0; y < curr_iter.height; ++y) { |
| 351 | const int canvas_y = curr_iter.y_offset + y; |
| 352 | int left1, width1, left2, width2; |
| 353 | FindBlendRangeAtRowWebP(&curr_iter, &prev_iter, canvas_y, &left1, |
| 354 | &width1, &left2, &width2); |
| 355 | if (width1 > 0) { |
| 356 | const size_t offset1 = canvas_y * canvas_width + left1; |
| 357 | BlendPixelRowWebP(reinterpret_cast<uint32_t*>(curr_rgba) + offset1, |
| 358 | reinterpret_cast<uint32_t*>(prev_rgba) + offset1, |
| 359 | width1); |
| 360 | } |
| 361 | if (width2 > 0) { |
| 362 | const size_t offset2 = canvas_y * canvas_width + left2; |
| 363 | BlendPixelRowWebP(reinterpret_cast<uint32_t*>(curr_rgba) + offset2, |
| 364 | reinterpret_cast<uint32_t*>(prev_rgba) + offset2, |
| 365 | width2); |
| 366 | } |
| 367 | } |
| 368 | } |
| 369 | } |
| 370 | |
| 371 | // Needed only because we may want to compare with GIF later. |
| 372 | CleanupTransparentPixels(reinterpret_cast<uint32_t*>(curr_rgba), |
| 373 | canvas_width, canvas_height); |
| 374 | |
| 375 | if (dump_frames) { |
| 376 | ok = ok && DumpFrame(filename, dump_folder, i, curr_rgba, |
| 377 | canvas_width, canvas_height); |
| 378 | } |
| 379 | } |
| 380 | WebPDemuxReleaseIterator(&prev_iter); |
| 381 | WebPDemuxReleaseIterator(&curr_iter); |
| 382 | WebPDemuxDelete(demux); |
| 383 | return ok; |
| 384 | } |
| 385 | |
| 386 | // ----------------------------------------------------------------------------- |
| 387 | // GIF Decoding. |
| 388 | |
// Returns true if this is a valid GIF bitstream.
// Note: the 6-byte signatures are hard-coded (they match gif_lib.h's
// GIF_STAMP / GIF87_STAMP / GIF89_STAMP) so that this check compiles and works
// even when libgif (and hence <gif_lib.h>) is not available.
static bool IsGIF(const std::string& file_str) {
  static const size_t kGIFStampLen = 6;  // strlen("GIF87a")
  if (file_str.length() <= kGIFStampLen) return false;
  const char* const cstr = file_str.c_str();
  return !memcmp("GIFVER", cstr, kGIFStampLen) ||  // GIF_STAMP
         !memcmp("GIF87a", cstr, kGIFStampLen) ||  // GIF87_STAMP
         !memcmp("GIF89a", cstr, kGIFStampLen);    // GIF89_STAMP
}
| 397 | |
| 398 | #ifdef WEBP_HAVE_GIF |
| 399 | |
| 400 | // GIFLIB_MAJOR is only defined in libgif >= 4.2.0. |
| 401 | #if defined(GIFLIB_MAJOR) && defined(GIFLIB_MINOR) |
| 402 | # define LOCAL_GIF_VERSION ((GIFLIB_MAJOR << 8) | GIFLIB_MINOR) |
| 403 | # define LOCAL_GIF_PREREQ(maj, min) \ |
| 404 | (LOCAL_GIF_VERSION >= (((maj) << 8) | (min))) |
| 405 | #else |
| 406 | # define LOCAL_GIF_VERSION 0 |
| 407 | # define LOCAL_GIF_PREREQ(maj, min) 0 |
| 408 | #endif |
| 409 | |
| 410 | #if !LOCAL_GIF_PREREQ(5, 0) |
| 411 | |
// Added in v5.0
// Minimal local definition of giflib v5's GraphicsControlBlock for older
// libgif versions; mirrors the fields of the GIF Graphics Control Extension.
typedef struct GraphicsControlBlock {
  int DisposalMode;  // One of the DISPOSE_* values below.
#define DISPOSAL_UNSPECIFIED 0  // No disposal specified
#define DISPOSE_DO_NOT 1        // Leave image in place
#define DISPOSE_BACKGROUND 2    // Set area to background color
#define DISPOSE_PREVIOUS 3      // Restore to previous content
  bool UserInputFlag;    // User confirmation required before disposal
  int DelayTime;         // Pre-display delay in 0.01sec units
  int TransparentColor;  // Palette index for transparency, -1 if none
#define NO_TRANSPARENT_COLOR -1
} GraphicsControlBlock;
| 424 | |
| 425 | static int DGifExtensionToGCB(const size_t GifExtensionLength, |
| 426 | const GifByteType* GifExtension, |
| 427 | GraphicsControlBlock* gcb) { |
| 428 | if (GifExtensionLength != 4) { |
| 429 | return GIF_ERROR; |
| 430 | } |
| 431 | gcb->DisposalMode = (GifExtension[0] >> 2) & 0x07; |
| 432 | gcb->UserInputFlag = (GifExtension[0] & 0x02) != 0; |
| 433 | gcb->DelayTime = GifExtension[1] | (GifExtension[2] << 8); |
| 434 | if (GifExtension[0] & 0x01) { |
| 435 | gcb->TransparentColor = static_cast<int>(GifExtension[3]); |
| 436 | } else { |
| 437 | gcb->TransparentColor = NO_TRANSPARENT_COLOR; |
| 438 | } |
| 439 | return GIF_OK; |
| 440 | } |
| 441 | |
| 442 | static int DGifSavedExtensionToGCB(GifFileType* GifFile, int ImageIndex, |
| 443 | GraphicsControlBlock* gcb) { |
| 444 | int i; |
| 445 | if (ImageIndex < 0 || ImageIndex > GifFile->ImageCount - 1) { |
| 446 | return GIF_ERROR; |
| 447 | } |
| 448 | gcb->DisposalMode = DISPOSAL_UNSPECIFIED; |
| 449 | gcb->UserInputFlag = false; |
| 450 | gcb->DelayTime = 0; |
| 451 | gcb->TransparentColor = NO_TRANSPARENT_COLOR; |
| 452 | |
| 453 | for (i = 0; i < GifFile->SavedImages[ImageIndex].ExtensionBlockCount; i++) { |
| 454 | ExtensionBlock* ep = &GifFile->SavedImages[ImageIndex].ExtensionBlocks[i]; |
| 455 | if (ep->Function == GRAPHICS_EXT_FUNC_CODE) { |
| 456 | return DGifExtensionToGCB( |
| 457 | ep->ByteCount, reinterpret_cast<const GifByteType*>(ep->Bytes), gcb); |
| 458 | } |
| 459 | } |
| 460 | return GIF_ERROR; |
| 461 | } |
| 462 | |
| 463 | #define CONTINUE_EXT_FUNC_CODE 0x00 |
| 464 | |
| 465 | // Signature was changed in v5.0 |
| 466 | #define DGifOpenFileName(a, b) DGifOpenFileName(a) |
| 467 | |
| 468 | #endif // !LOCAL_GIF_PREREQ(5, 0) |
| 469 | |
| 470 | // Signature changed in v5.1 |
| 471 | #if !LOCAL_GIF_PREREQ(5, 1) |
| 472 | #define DGifCloseFile(a, b) DGifCloseFile(a) |
| 473 | #endif |
| 474 | |
// Prints a human-readable description of 'gif_error' to stderr, coping with
// the error-reporting API differences across libgif versions.
static void GIFDisplayError(const GifFileType* const gif, int gif_error) {
  // libgif 4.2.0 has retired PrintGifError() and added GifErrorString().
#if LOCAL_GIF_PREREQ(4, 2)
#if LOCAL_GIF_PREREQ(5, 0)
  // Static string actually, hence the const char* cast.
  const char* error_str = (const char*)GifErrorString(
      (gif == NULL) ? gif_error : gif->Error);
#else
  // v4.2-v4.x: GifErrorString() takes no argument.
  const char* error_str = (const char*)GifErrorString();
  (void)gif;
#endif
  if (error_str == NULL) error_str = "Unknown error";
  fprintf(stderr, "GIFLib Error %d: %s\n", gif_error, error_str);
#else
  // Pre-4.2: only PrintGifError() (which writes directly to stderr) exists.
  (void)gif;
  fprintf(stderr, "GIFLib Error %d: ", gif_error);
  PrintGifError();
  fprintf(stderr, "\n");
#endif
}
| 495 | |
| 496 | static bool IsKeyFrameGIF(const GifImageDesc& prev_desc, int prev_dispose, |
| 497 | const DecodedFrame* const prev_frame, |
| 498 | int canvas_width, int canvas_height) { |
| 499 | if (prev_frame == NULL) return true; |
| 500 | if (prev_dispose == DISPOSE_BACKGROUND) { |
| 501 | if (IsFullFrame(prev_desc.Width, prev_desc.Height, |
| 502 | canvas_width, canvas_height)) { |
| 503 | return true; |
| 504 | } |
| 505 | if (prev_frame->is_key_frame) return true; |
| 506 | } |
| 507 | return false; |
| 508 | } |
| 509 | |
| 510 | static int GetTransparentIndexGIF(GifFileType* gif) { |
| 511 | GraphicsControlBlock first_gcb = GraphicsControlBlock(); |
| 512 | DGifSavedExtensionToGCB(gif, 0, &first_gcb); |
| 513 | return first_gcb.TransparentColor; |
| 514 | } |
| 515 | |
| 516 | static uint32_t GetBackgroundColorGIF(GifFileType* gif) { |
| 517 | const int transparent_index = GetTransparentIndexGIF(gif); |
| 518 | const ColorMapObject* const color_map = gif->SColorMap; |
| 519 | if (transparent_index != NO_TRANSPARENT_COLOR && |
| 520 | gif->SBackGroundColor == transparent_index) { |
| 521 | return 0x00ffffff; // Special case: transparent white. |
| 522 | } else if (color_map == NULL || color_map->Colors == NULL |
| 523 | || gif->SBackGroundColor >= color_map->ColorCount) { |
| 524 | return 0xffffffff; // Invalid: assume white. |
| 525 | } else { |
| 526 | const GifColorType color = color_map->Colors[gif->SBackGroundColor]; |
| 527 | return (0xff << 24) | |
| 528 | (color.Red << 16) | |
| 529 | (color.Green << 8) | |
| 530 | (color.Blue << 0); |
| 531 | } |
| 532 | } |
| 533 | |
| 534 | // Find appropriate app extension and get loop count from the next extension. |
| 535 | static uint32_t GetLoopCountGIF(const GifFileType* const gif) { |
| 536 | for (int i = 0; i < gif->ImageCount; ++i) { |
| 537 | const SavedImage* const image = &gif->SavedImages[i]; |
| 538 | for (int j = 0; (j + 1) < image->ExtensionBlockCount; ++j) { |
| 539 | const ExtensionBlock* const eb1 = image->ExtensionBlocks + j; |
| 540 | const ExtensionBlock* const eb2 = image->ExtensionBlocks + j + 1; |
| 541 | const char* const signature = reinterpret_cast<const char*>(eb1->Bytes); |
| 542 | const bool signature_is_ok = |
| 543 | (eb1->Function == APPLICATION_EXT_FUNC_CODE) && |
| 544 | (eb1->ByteCount == 11) && |
| 545 | (!memcmp(signature, "NETSCAPE2.0", 11) || |
| 546 | !memcmp(signature, "ANIMEXTS1.0", 11)); |
| 547 | if (signature_is_ok && |
| 548 | eb2->Function == CONTINUE_EXT_FUNC_CODE && eb2->ByteCount >= 3 && |
| 549 | eb2->Bytes[0] == 1) { |
| 550 | return (static_cast<uint32_t>(eb2->Bytes[2]) << 8) + |
| 551 | (static_cast<uint32_t>(eb2->Bytes[1]) << 0); |
| 552 | } |
| 553 | } |
| 554 | } |
| 555 | return 0; // Default. |
| 556 | } |
| 557 | |
| 558 | // Get duration of 'n'th frame in milliseconds. |
| 559 | static int GetFrameDurationGIF(GifFileType* gif, int n) { |
| 560 | GraphicsControlBlock gcb = GraphicsControlBlock(); |
| 561 | DGifSavedExtensionToGCB(gif, n, &gcb); |
| 562 | return gcb.DelayTime * 10; |
| 563 | } |
| 564 | |
| 565 | // Returns true if frame 'target' completely covers 'covered'. |
| 566 | static bool CoversFrameGIF(const GifImageDesc& target, |
| 567 | const GifImageDesc& covered) { |
| 568 | return target.Left <= covered.Left && |
| 569 | covered.Left + covered.Width <= target.Left + target.Width && |
| 570 | target.Top <= covered.Top && |
| 571 | covered.Top + covered.Height <= target.Top + target.Height; |
| 572 | } |
| 573 | |
| 574 | static void RemapPixelsGIF(const uint8_t* const src, |
| 575 | const ColorMapObject* const cmap, |
| 576 | int transparent_color, int len, uint8_t* dst) { |
| 577 | int i; |
| 578 | for (i = 0; i < len; ++i) { |
| 579 | if (src[i] != transparent_color) { |
| 580 | // If a pixel in the current frame is transparent, we don't modify it, so |
| 581 | // that we can see-through the corresponding pixel from an earlier frame. |
| 582 | const GifColorType c = cmap->Colors[src[i]]; |
| 583 | dst[4 * i + 0] = c.Red; |
| 584 | dst[4 * i + 1] = c.Green; |
| 585 | dst[4 * i + 2] = c.Blue; |
| 586 | dst[4 * i + 3] = 0xff; |
| 587 | } |
| 588 | } |
| 589 | } |
| 590 | |
| 591 | static bool ReadFrameGIF(const SavedImage* const gif_image, |
| 592 | const ColorMapObject* cmap, int transparent_color, |
| 593 | int out_stride, uint8_t* const dst) { |
| 594 | const GifImageDesc& image_desc = gif_image->ImageDesc; |
| 595 | if (image_desc.ColorMap) { |
| 596 | cmap = image_desc.ColorMap; |
| 597 | } |
| 598 | |
| 599 | if (cmap == NULL || cmap->ColorCount != (1 << cmap->BitsPerPixel)) { |
| 600 | fprintf(stderr, "Potentially corrupt color map.\n"); |
| 601 | return false; |
| 602 | } |
| 603 | |
| 604 | const uint8_t* in = reinterpret_cast<uint8_t*>(gif_image->RasterBits); |
| 605 | uint8_t* out = |
| 606 | dst + image_desc.Top * out_stride + image_desc.Left * kNumChannels; |
| 607 | |
| 608 | for (int j = 0; j < image_desc.Height; ++j) { |
| 609 | RemapPixelsGIF(in, cmap, transparent_color, image_desc.Width, out); |
| 610 | in += image_desc.Width; |
| 611 | out += out_stride; |
| 612 | } |
| 613 | return true; |
| 614 | } |
| 615 | |
// Read animated GIF bitstream from 'filename' into 'AnimatedImage' struct.
// When 'dump_frames' is true, each reconstructed frame is also written as a
// PAM file under 'dump_folder'. Returns true on success.
static bool ReadAnimatedGIF(const char filename[], AnimatedImage* const image,
                            bool dump_frames, const char dump_folder[]) {
  GifFileType* gif = DGifOpenFileName(filename, NULL);
  if (gif == NULL) {
    fprintf(stderr, "Could not read file: %s.\n", filename);
    return false;
  }

  // Read the whole file into memory so frames can be accessed in any order.
  const int gif_error = DGifSlurp(gif);
  if (gif_error != GIF_OK) {
    fprintf(stderr, "Could not parse image: %s.\n", filename);
    GIFDisplayError(gif, gif_error);
    DGifCloseFile(gif, NULL);
    return false;
  }

  // Animation properties.
  image->canvas_width = static_cast<uint32_t>(gif->SWidth);
  image->canvas_height = static_cast<uint32_t>(gif->SHeight);
  if (image->canvas_width > MAX_CANVAS_SIZE ||
      image->canvas_height > MAX_CANVAS_SIZE) {
    fprintf(stderr, "Invalid canvas dimension: %d x %d\n",
            image->canvas_width, image->canvas_height);
    DGifCloseFile(gif, NULL);
    return false;
  }
  image->loop_count = GetLoopCountGIF(gif);
  image->bgcolor = GetBackgroundColorGIF(gif);

  const uint32_t frame_count = static_cast<uint32_t>(gif->ImageCount);
  if (frame_count == 0) {
    DGifCloseFile(gif, NULL);
    return false;
  }

  if (image->canvas_width == 0 || image->canvas_height == 0) {
    // Fall back to the first frame's dimensions and anchor it at the origin.
    image->canvas_width = gif->SavedImages[0].ImageDesc.Width;
    image->canvas_height = gif->SavedImages[0].ImageDesc.Height;
    gif->SavedImages[0].ImageDesc.Left = 0;
    gif->SavedImages[0].ImageDesc.Top = 0;
    if (image->canvas_width == 0 || image->canvas_height == 0) {
      fprintf(stderr, "Invalid canvas size in GIF.\n");
      DGifCloseFile(gif, NULL);
      return false;
    }
  }
  // Allocate frames.
  AllocateFrames(image, frame_count);

  const uint32_t canvas_width = image->canvas_width;
  const uint32_t canvas_height = image->canvas_height;

  // Decode and reconstruct frames.
  for (uint32_t i = 0; i < frame_count; ++i) {
    const int canvas_width_in_bytes = canvas_width * kNumChannels;
    const SavedImage* const curr_gif_image = &gif->SavedImages[i];
    GraphicsControlBlock curr_gcb = GraphicsControlBlock();
    DGifSavedExtensionToGCB(gif, i, &curr_gcb);

    DecodedFrame* const curr_frame = &image->frames[i];
    uint8_t* const curr_rgba = curr_frame->rgba.data();
    curr_frame->duration = GetFrameDurationGIF(gif, i);

    if (i == 0) {  // Initialize as transparent.
      curr_frame->is_key_frame = true;
      ZeroFillCanvas(curr_rgba, canvas_width, canvas_height);
    } else {
      DecodedFrame* const prev_frame = &image->frames[i - 1];
      const GifImageDesc& prev_desc = gif->SavedImages[i - 1].ImageDesc;
      GraphicsControlBlock prev_gcb = GraphicsControlBlock();
      DGifSavedExtensionToGCB(gif, i - 1, &prev_gcb);

      curr_frame->is_key_frame =
          IsKeyFrameGIF(prev_desc, prev_gcb.DisposalMode, prev_frame,
                        canvas_width, canvas_height);

      if (curr_frame->is_key_frame) {  // Initialize as transparent.
        ZeroFillCanvas(curr_rgba, canvas_width, canvas_height);
      } else {
        // Initialize with previous canvas.
        uint8_t* const prev_rgba = image->frames[i - 1].rgba.data();
        CopyCanvas(prev_rgba, curr_rgba, canvas_width, canvas_height);

        // Dispose previous frame rectangle.
        bool prev_frame_disposed =
            (prev_gcb.DisposalMode == DISPOSE_BACKGROUND ||
             prev_gcb.DisposalMode == DISPOSE_PREVIOUS);
        bool curr_frame_opaque =
            (curr_gcb.TransparentColor == NO_TRANSPARENT_COLOR);
        // Disposal is a no-op when the (opaque) current frame is about to
        // overwrite the previous frame's rectangle entirely anyway.
        bool prev_frame_completely_covered =
            curr_frame_opaque &&
            CoversFrameGIF(curr_gif_image->ImageDesc, prev_desc);

        if (prev_frame_disposed && !prev_frame_completely_covered) {
          switch (prev_gcb.DisposalMode) {
            case DISPOSE_BACKGROUND: {
              ZeroFillFrameRect(curr_rgba, canvas_width_in_bytes,
                                prev_desc.Left, prev_desc.Top,
                                prev_desc.Width, prev_desc.Height);
              break;
            }
            case DISPOSE_PREVIOUS: {
              // Find the canvas that 'restore to previous' refers to: walk
              // back past any run of frames that also dispose to previous.
              int src_frame_num = i - 2;
              while (src_frame_num >= 0) {
                GraphicsControlBlock src_frame_gcb = GraphicsControlBlock();
                DGifSavedExtensionToGCB(gif, src_frame_num, &src_frame_gcb);
                if (src_frame_gcb.DisposalMode != DISPOSE_PREVIOUS) break;
                --src_frame_num;
              }
              if (src_frame_num >= 0) {
                // Restore pixels inside previous frame rectangle to
                // corresponding pixels in source canvas.
                uint8_t* const src_frame_rgba =
                    image->frames[src_frame_num].rgba.data();
                CopyFrameRectangle(src_frame_rgba, curr_rgba,
                                   canvas_width_in_bytes,
                                   prev_desc.Left, prev_desc.Top,
                                   prev_desc.Width, prev_desc.Height);
              } else {
                // Source canvas doesn't exist. So clear previous frame
                // rectangle to background.
                ZeroFillFrameRect(curr_rgba, canvas_width_in_bytes,
                                  prev_desc.Left, prev_desc.Top,
                                  prev_desc.Width, prev_desc.Height);
              }
              break;
            }
            default:
              break;  // Nothing to do.
          }
        }
      }
    }

    // Decode current frame.
    if (!ReadFrameGIF(curr_gif_image, gif->SColorMap, curr_gcb.TransparentColor,
                      canvas_width_in_bytes, curr_rgba)) {
      DGifCloseFile(gif, NULL);
      return false;
    }

    if (dump_frames) {
      if (!DumpFrame(filename, dump_folder, i, curr_rgba,
                     canvas_width, canvas_height)) {
        DGifCloseFile(gif, NULL);
        return false;
      }
    }
  }
  DGifCloseFile(gif, NULL);
  return true;
}
| 769 | |
| 770 | #else |
| 771 | |
| 772 | static bool ReadAnimatedGIF(const char filename[], AnimatedImage* const image, |
| 773 | bool dump_frames, const char dump_folder[]) { |
| 774 | (void)filename; |
| 775 | (void)image; |
| 776 | (void)dump_frames; |
| 777 | (void)dump_folder; |
| 778 | fprintf(stderr, "GIF support not compiled. Please install the libgif-dev " |
| 779 | "package before building.\n"); |
| 780 | return false; |
| 781 | } |
| 782 | |
| 783 | #endif // WEBP_HAVE_GIF |
| 784 | |
| 785 | // ----------------------------------------------------------------------------- |
| 786 | |
// Reads the entire contents of the file named 'filename' into '*filestr'
// (binary-safe: embedded NUL bytes are preserved).
// Returns true on success, false if the file could not be opened.
static bool ReadFile(const char filename[], std::string* filestr) {
  std::ifstream fin(filename, std::ios::binary);
  if (!fin.good()) return false;
  std::ostringstream strout;
  strout << fin.rdbuf();  // Bulk-copy the whole stream buffer.
  *filestr = strout.str();
  // No explicit close(): the ifstream destructor releases the file (RAII).
  return true;
}
| 796 | |
| 797 | bool ReadAnimatedImage(const char filename[], AnimatedImage* const image, |
| 798 | bool dump_frames, const char dump_folder[]) { |
| 799 | std::string file_str; |
| 800 | if (!ReadFile(filename, &file_str)) { |
| 801 | fprintf(stderr, "Error reading file: %s\n", filename); |
| 802 | return false; |
| 803 | } |
| 804 | |
| 805 | if (IsWebP(file_str)) { |
| 806 | return ReadAnimatedWebP(filename, file_str, image, dump_frames, |
| 807 | dump_folder); |
| 808 | } else if (IsGIF(file_str)) { |
| 809 | return ReadAnimatedGIF(filename, image, dump_frames, dump_folder); |
| 810 | } else { |
| 811 | fprintf(stderr, |
| 812 | "Unknown file type: %s. Supported file types are WebP and GIF\n", |
| 813 | filename); |
| 814 | return false; |
| 815 | } |
| 816 | } |
| 817 | |
Pascal Massimino | acb297e | 2015-07-07 22:45:49 +0000 | [diff] [blame] | 818 | static void Accumulate(double v1, double v2, double* const max_diff, |
| 819 | double* const sse) { |
| 820 | const double diff = fabs(v1 - v2); |
| 821 | if (diff > *max_diff) *max_diff = diff; |
| 822 | *sse += diff * diff; |
| 823 | } |
| 824 | |
Urvang Joshi | acd7b5a | 2015-05-01 16:11:49 -0700 | [diff] [blame] | 825 | void GetDiffAndPSNR(const uint8_t rgba1[], const uint8_t rgba2[], |
Pascal Massimino | acb297e | 2015-07-07 22:45:49 +0000 | [diff] [blame] | 826 | uint32_t width, uint32_t height, bool premultiply, |
| 827 | int* const max_diff, double* const psnr) { |
Urvang Joshi | acd7b5a | 2015-05-01 16:11:49 -0700 | [diff] [blame] | 828 | const uint32_t stride = width * kNumChannels; |
Pascal Massimino | acb297e | 2015-07-07 22:45:49 +0000 | [diff] [blame] | 829 | const int kAlphaChannel = kNumChannels - 1; |
| 830 | double f_max_diff = 0.; |
Urvang Joshi | acd7b5a | 2015-05-01 16:11:49 -0700 | [diff] [blame] | 831 | double sse = 0.; |
| 832 | for (uint32_t y = 0; y < height; ++y) { |
Pascal Massimino | acb297e | 2015-07-07 22:45:49 +0000 | [diff] [blame] | 833 | for (uint32_t x = 0; x < stride; x += kNumChannels) { |
Urvang Joshi | acd7b5a | 2015-05-01 16:11:49 -0700 | [diff] [blame] | 834 | const size_t offset = y * stride + x; |
Pascal Massimino | acb297e | 2015-07-07 22:45:49 +0000 | [diff] [blame] | 835 | const int alpha1 = rgba1[offset + kAlphaChannel]; |
| 836 | const int alpha2 = rgba2[offset + kAlphaChannel]; |
| 837 | Accumulate(alpha1, alpha2, &f_max_diff, &sse); |
| 838 | if (!premultiply) { |
| 839 | for (int k = 0; k < kAlphaChannel; ++k) { |
| 840 | Accumulate(rgba1[offset + k], rgba2[offset + k], &f_max_diff, &sse); |
| 841 | } |
| 842 | } else { |
| 843 | // premultiply R/G/B channels with alpha value |
| 844 | for (int k = 0; k < kAlphaChannel; ++k) { |
| 845 | Accumulate(rgba1[offset + k] * alpha1 / 255., |
| 846 | rgba2[offset + k] * alpha2 / 255., |
| 847 | &f_max_diff, &sse); |
| 848 | } |
| 849 | } |
Urvang Joshi | acd7b5a | 2015-05-01 16:11:49 -0700 | [diff] [blame] | 850 | } |
| 851 | } |
Pascal Massimino | acb297e | 2015-07-07 22:45:49 +0000 | [diff] [blame] | 852 | *max_diff = static_cast<int>(f_max_diff); |
Urvang Joshi | acd7b5a | 2015-05-01 16:11:49 -0700 | [diff] [blame] | 853 | if (*max_diff == 0) { |
| 854 | *psnr = 99.; // PSNR when images are identical. |
| 855 | } else { |
| 856 | sse /= stride * height; |
| 857 | *psnr = 10. * log10(255. * 255. / sse); |
| 858 | } |
| 859 | } |