use byteorder_lite::{LittleEndian, ReadBytesExt};
use quick_error::quick_error;

use std::collections::HashMap;
use std::io::{self, BufRead, Cursor, Read, Seek};
use std::num::NonZeroU16;
use std::ops::Range;

use crate::extended::{self, get_alpha_predictor, read_alpha_chunk, WebPExtendedInfo};

use super::lossless::LosslessDecoder;
use super::vp8::Vp8Decoder;

quick_error! {
    /// Errors that can occur when attempting to decode a WebP image
    #[derive(Debug)]
    #[non_exhaustive]
    pub enum DecodingError {
        /// An IO error occurred while reading the file
        IoError(err: io::Error) {
            from()
            display("IO Error: {}", err)
            source(err)
        }

        /// RIFF's "RIFF" signature not found or invalid
        RiffSignatureInvalid(err: [u8; 4]) {
            display("Invalid RIFF signature: {err:x?}")
        }

        /// WebP's "WEBP" signature not found or invalid
        WebpSignatureInvalid(err: [u8; 4]) {
            display("Invalid WebP signature: {err:x?}")
        }

        /// An expected chunk was missing
        ChunkMissing {
            display("An expected chunk was missing")
        }

        /// Chunk Header was incorrect or invalid in its usage
        ChunkHeaderInvalid(err: [u8; 4]) {
            display("Invalid Chunk header: {err:x?}")
        }

        #[allow(deprecated)]
        #[deprecated]
        /// Some bits were invalid
        ReservedBitSet {
            display("Reserved bits set")
        }

        /// The ALPH chunk preprocessing info flag was invalid
        InvalidAlphaPreprocessing {
            display("Alpha chunk preprocessing flag invalid")
        }

        /// Invalid compression method
        InvalidCompressionMethod {
            display("Invalid compression method")
        }

        /// Alpha chunk doesn't match the frame's size
        AlphaChunkSizeMismatch {
            display("Alpha chunk size mismatch")
        }

        /// Image is too large, either for the platform's pointer size or generally
        ImageTooLarge {
            display("Image too large")
        }

        /// Frame would go out of the canvas
        FrameOutsideImage {
            display("Frame outside image")
        }

        /// Signature of 0x2f not found
        LosslessSignatureInvalid(err: u8) {
            display("Invalid lossless signature: {err:x?}")
        }

        /// Version Number was not zero
        VersionNumberInvalid(err: u8) {
            display("Invalid lossless version number: {err}")
        }

        /// Invalid color cache bits
        InvalidColorCacheBits(err: u8) {
            display("Invalid color cache bits: {err}")
        }

        /// An invalid Huffman code was encountered
        HuffmanError {
            display("Invalid Huffman code")
        }

        /// The bitstream was somehow corrupt
        BitStreamError {
            display("Corrupt bitstream")
        }

        /// The transforms specified were invalid
        TransformError {
            display("Invalid transform")
        }

        /// VP8's `[0x9D, 0x01, 0x2A]` magic not found or invalid
        Vp8MagicInvalid(err: [u8; 3]) {
            display("Invalid VP8 magic: {err:x?}")
        }

        /// VP8 Decoder initialisation wasn't provided with enough data
        NotEnoughInitData {
            display("Not enough VP8 init data")
        }

        /// At time of writing, only the YUV colour-space encoded as `0` is specified
        ColorSpaceInvalid(err: u8) {
            display("Invalid VP8 color space: {err}")
        }

        /// LUMA prediction mode was not recognised
        LumaPredictionModeInvalid(err: i8) {
            display("Invalid VP8 luma prediction mode: {err}")
        }

        /// Intra-prediction mode was not recognised
        IntraPredictionModeInvalid(err: i8) {
            display("Invalid VP8 intra prediction mode: {err}")
        }

        /// Chroma prediction mode was not recognised
        ChromaPredictionModeInvalid(err: i8) {
            display("Invalid VP8 chroma prediction mode: {err}")
        }

        /// Inconsistent image sizes
        InconsistentImageSizes {
            display("Inconsistent image sizes")
        }

        /// The file may be valid, but this crate doesn't support decoding it.
        UnsupportedFeature(err: String) {
            display("Unsupported feature: {err}")
        }

        /// Invalid function call or parameter
        InvalidParameter(err: String) {
            display("Invalid parameter: {err}")
        }

        /// Memory limit exceeded
        MemoryLimitExceeded {
            display("Memory limit exceeded")
        }

        /// Invalid chunk size
        InvalidChunkSize {
            display("Invalid chunk size")
        }

        /// No more frames in image
        NoMoreFrames {
            display("No more frames")
        }
    }
}

/// All possible RIFF chunks in a WebP image file
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
pub(crate) enum WebPRiffChunk {
    RIFF,
    WEBP,
    VP8,
    VP8L,
    VP8X,
    ANIM,
    ANMF,
    ALPH,
    ICCP,
    EXIF,
    XMP,
    Unknown([u8; 4]),
}

impl WebPRiffChunk {
    pub(crate) const fn from_fourcc(chunk_fourcc: [u8; 4]) -> Self {
        match &chunk_fourcc {
            b"RIFF" => Self::RIFF,
            b"WEBP" => Self::WEBP,
            b"VP8 " => Self::VP8,
            b"VP8L" => Self::VP8L,
            b"VP8X" => Self::VP8X,
            b"ANIM" => Self::ANIM,
            b"ANMF" => Self::ANMF,
            b"ALPH" => Self::ALPH,
            b"ICCP" => Self::ICCP,
            b"EXIF" => Self::EXIF,
            b"XMP " => Self::XMP,
            _ => Self::Unknown(chunk_fourcc),
        }
    }

    pub(crate) const fn to_fourcc(self) -> [u8; 4] {
        match self {
            Self::RIFF => *b"RIFF",
            Self::WEBP => *b"WEBP",
            Self::VP8 => *b"VP8 ",
            Self::VP8L => *b"VP8L",
            Self::VP8X => *b"VP8X",
            Self::ANIM => *b"ANIM",
            Self::ANMF => *b"ANMF",
            Self::ALPH => *b"ALPH",
            Self::ICCP => *b"ICCP",
            Self::EXIF => *b"EXIF",
            Self::XMP => *b"XMP ",
            Self::Unknown(fourcc) => fourcc,
        }
    }

    pub(crate) const fn is_unknown(self) -> bool {
        matches!(self, Self::Unknown(_))
    }
}

// enum WebPImage {
//     Lossy(VP8Frame),
//     Lossless(LosslessFrame),
//     Extended(ExtendedImage),
// }

enum ImageKind {
    Lossy,
    Lossless,
    Extended(WebPExtendedInfo),
}

struct AnimationState {
    next_frame: u32,
    next_frame_start: u64,
    dispose_next_frame: bool,
    previous_frame_width: u32,
    previous_frame_height: u32,
    previous_frame_x_offset: u32,
    previous_frame_y_offset: u32,
    canvas: Option<Vec<u8>>,
}
impl Default for AnimationState {
    fn default() -> Self {
        Self {
            next_frame: 0,
            next_frame_start: 0,
            dispose_next_frame: true,
            previous_frame_width: 0,
            previous_frame_height: 0,
            previous_frame_x_offset: 0,
            previous_frame_y_offset: 0,
            canvas: None,
        }
    }
}

/// Number of times that an animation loops.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum LoopCount {
    /// The animation loops forever.
    Forever,
    /// Each frame of the animation is displayed the specified number of times.
    Times(NonZeroU16),
}

/// WebP image format decoder.
pub struct WebPDecoder<R> {
    r: R,
    memory_limit: usize,

    width: u32,
    height: u32,

    kind: ImageKind,
    animation: AnimationState,

    is_lossy: bool,
    has_alpha: bool,
    num_frames: u32,
    loop_count: LoopCount,
    loop_duration: u64,

    chunks: HashMap<WebPRiffChunk, Range<u64>>,
}

impl<R: BufRead + Seek> WebPDecoder<R> {
    /// Create a new `WebPDecoder` from the reader `r`. The decoder performs many small reads, so the
    /// reader should be buffered.
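    ///
    /// # Examples
    ///
    /// A minimal usage sketch; it assumes the decoder is exposed at the crate root as
    /// `image_webp::WebPDecoder` (as in the `image-webp` crate) and that an `"input.webp"`
    /// file exists.
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use std::io::BufReader;
    ///
    /// # fn main() -> Result<(), image_webp::DecodingError> {
    /// let reader = BufReader::new(File::open("input.webp")?);
    /// let mut decoder = image_webp::WebPDecoder::new(reader)?;
    /// let (width, height) = decoder.dimensions();
    /// println!("{width}x{height}");
    /// // Allocate the output buffer and decode the (first) image into it.
    /// let mut pixels = vec![0u8; decoder.output_buffer_size().unwrap()];
    /// decoder.read_image(&mut pixels)?;
    /// # Ok(())
    /// # }
    /// ```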
    pub fn new(r: R) -> Result<Self, DecodingError> {
        let mut decoder = Self {
            r,
            width: 0,
            height: 0,
            num_frames: 0,
            kind: ImageKind::Lossy,
            chunks: HashMap::new(),
            animation: Default::default(),
            memory_limit: usize::MAX,
            is_lossy: false,
            has_alpha: false,
            loop_count: LoopCount::Times(NonZeroU16::new(1).unwrap()),
            loop_duration: 0,
        };
        decoder.read_data()?;
        Ok(decoder)
    }

    fn read_data(&mut self) -> Result<(), DecodingError> {
        let (WebPRiffChunk::RIFF, riff_size, _) = read_chunk_header(&mut self.r)? else {
            return Err(DecodingError::ChunkHeaderInvalid(*b"RIFF"));
        };

        match &read_fourcc(&mut self.r)? {
            WebPRiffChunk::WEBP => {}
            fourcc => return Err(DecodingError::WebpSignatureInvalid(fourcc.to_fourcc())),
        }

        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
        let start = self.r.stream_position()?;

        match chunk {
            WebPRiffChunk::VP8 => {
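                // The chunk begins with the 3-byte VP8 frame tag (RFC 6386): bit 0 is the
                // frame type (0 = key frame), bits 1-3 the version, bit 4 the show_frame
                // flag, and the remaining 19 bits the first partition size.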
                let tag = self.r.read_u24::<LittleEndian>()?;

                let keyframe = tag & 1 == 0;
                if !keyframe {
                    return Err(DecodingError::UnsupportedFeature(
                        "Non-keyframe frames".to_owned(),
                    ));
                }

                let mut tag = [0u8; 3];
                self.r.read_exact(&mut tag)?;
                if tag != [0x9d, 0x01, 0x2a] {
                    return Err(DecodingError::Vp8MagicInvalid(tag));
                }

                let w = self.r.read_u16::<LittleEndian>()?;
                let h = self.r.read_u16::<LittleEndian>()?;

                self.width = u32::from(w & 0x3FFF);
                self.height = u32::from(h & 0x3FFF);
                if self.width == 0 || self.height == 0 {
                    return Err(DecodingError::InconsistentImageSizes);
                }

                self.chunks
                    .insert(WebPRiffChunk::VP8, start..start + chunk_size);
                self.kind = ImageKind::Lossy;
                self.is_lossy = true;
            }
            WebPRiffChunk::VP8L => {
                let signature = self.r.read_u8()?;
                if signature != 0x2f {
                    return Err(DecodingError::LosslessSignatureInvalid(signature));
                }

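                // The 32-bit VP8L header packs, starting from the least significant bit:
                // width-1 (14 bits), height-1 (14 bits), an alpha-is-used flag (1 bit),
                // and a 3-bit version number.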
                let header = self.r.read_u32::<LittleEndian>()?;
                let version = header >> 29;
                if version != 0 {
                    return Err(DecodingError::VersionNumberInvalid(version as u8));
                }

                self.width = (1 + header) & 0x3FFF;
                self.height = (1 + (header >> 14)) & 0x3FFF;
                self.chunks
                    .insert(WebPRiffChunk::VP8L, start..start + chunk_size);
                self.kind = ImageKind::Lossless;
                self.has_alpha = (header >> 28) & 1 != 0;
            }
            WebPRiffChunk::VP8X => {
                let mut info = extended::read_extended_header(&mut self.r)?;
                self.width = info.canvas_width;
                self.height = info.canvas_height;

                let mut position = start + chunk_size_rounded;
                let max_position = position + riff_size.saturating_sub(12);
                self.r.seek(io::SeekFrom::Start(position))?;

                while position < max_position {
                    match read_chunk_header(&mut self.r) {
                        Ok((chunk, chunk_size, chunk_size_rounded)) => {
                            let range = position + 8..position + 8 + chunk_size;
                            position += 8 + chunk_size_rounded;

                            if !chunk.is_unknown() {
                                self.chunks.entry(chunk).or_insert(range);
                            }

                            if chunk == WebPRiffChunk::ANMF {
                                self.num_frames += 1;
                                if chunk_size < 24 {
                                    return Err(DecodingError::InvalidChunkSize);
                                }

                                self.r.seek_relative(12)?;
                                let duration = self.r.read_u32::<LittleEndian>()? & 0xffffff;
                                self.loop_duration =
                                    self.loop_duration.wrapping_add(u64::from(duration));

                                // If the image is animated, the image data chunk will be
                                // inside the ANMF chunks, so we must inspect them to
                                // determine whether the image contains any lossy image
                                // data. VP8 chunks store lossy data and the spec says that
                                // lossless images SHOULD NOT contain ALPH chunks, so we
                                // treat both as indicators of lossy images.
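                                //
                                // Reading the frame header above consumed 16 bytes of the
                                // ANMF payload; peeking at a subchunk header consumes 8
                                // more, hence the -24 and -16 relative seeks below.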
                                if !self.is_lossy {
                                    let (subchunk, ..) = read_chunk_header(&mut self.r)?;
                                    if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk {
                                        self.is_lossy = true;
                                    }
                                    self.r.seek_relative(chunk_size_rounded as i64 - 24)?;
                                } else {
                                    self.r.seek_relative(chunk_size_rounded as i64 - 16)?;
                                }

                                continue;
                            }

                            self.r.seek_relative(chunk_size_rounded as i64)?;
                        }
                        Err(DecodingError::IoError(e))
                            if e.kind() == io::ErrorKind::UnexpectedEof =>
                        {
                            break;
                        }
                        Err(e) => return Err(e),
                    }
                }
                self.is_lossy = self.is_lossy || self.chunks.contains_key(&WebPRiffChunk::VP8);

                if info.animation
                    && (!self.chunks.contains_key(&WebPRiffChunk::ANIM)
                        || !self.chunks.contains_key(&WebPRiffChunk::ANMF))
                    || info.icc_profile && !self.chunks.contains_key(&WebPRiffChunk::ICCP)
                    || info.exif_metadata && !self.chunks.contains_key(&WebPRiffChunk::EXIF)
                    || info.xmp_metadata && !self.chunks.contains_key(&WebPRiffChunk::XMP)
                    || !info.animation
                        && self.chunks.contains_key(&WebPRiffChunk::VP8)
                            == self.chunks.contains_key(&WebPRiffChunk::VP8L)
                {
                    return Err(DecodingError::ChunkMissing);
                }

                // Decode ANIM chunk.
                if info.animation {
                    match self.read_chunk(WebPRiffChunk::ANIM, 6) {
                        Ok(Some(chunk)) => {
                            let mut cursor = Cursor::new(chunk);
                            cursor.read_exact(&mut info.background_color)?;
                            self.loop_count = match cursor.read_u16::<LittleEndian>()? {
                                0 => LoopCount::Forever,
                                n => LoopCount::Times(NonZeroU16::new(n).unwrap()),
                            };
                            self.animation.next_frame_start =
                                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
                        }
                        Ok(None) => return Err(DecodingError::ChunkMissing),
                        Err(DecodingError::MemoryLimitExceeded) => {
                            return Err(DecodingError::InvalidChunkSize)
                        }
                        Err(e) => return Err(e),
                    }
                }

                // If the image is animated, the image data chunk will be inside the ANMF
                // chunks. We store the ALPH, VP8, and VP8L chunks (as applicable) of the
                // first frame in the hashmap so that we can read them later.
                if let Some(range) = self.chunks.get(&WebPRiffChunk::ANMF).cloned() {
                    let mut position = range.start + 16;
                    self.r.seek(io::SeekFrom::Start(position))?;
                    for _ in 0..2 {
                        let (subchunk, subchunk_size, subchunk_size_rounded) =
                            read_chunk_header(&mut self.r)?;
                        let subrange = position + 8..position + 8 + subchunk_size;
                        self.chunks.entry(subchunk).or_insert(subrange.clone());

                        position += 8 + subchunk_size_rounded;
                        if position + 8 > range.end {
                            break;
                        }
                    }
                }

                self.has_alpha = info.alpha;
                self.kind = ImageKind::Extended(info);
            }
            _ => return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())),
        };

        Ok(())
    }

    /// Sets the maximum amount of memory that the decoder is allowed to allocate at once.
    ///
    /// TODO: Some allocations currently ignore this limit.
    pub fn set_memory_limit(&mut self, limit: usize) {
        self.memory_limit = limit;
    }

    /// Sets the background color if the image is an extended and animated webp.
    pub fn set_background_color(&mut self, color: [u8; 4]) -> Result<(), DecodingError> {
        if let ImageKind::Extended(info) = &mut self.kind {
            info.background_color = color;
            Ok(())
        } else {
            Err(DecodingError::InvalidParameter(
                "Background color can only be set on animated webp".to_owned(),
            ))
        }
    }

    /// Returns the (width, height) of the image in pixels.
    pub fn dimensions(&self) -> (u32, u32) {
        (self.width, self.height)
    }

    /// Returns whether the image has an alpha channel. If so, the pixel format is Rgba8;
    /// otherwise it is Rgb8.
    pub fn has_alpha(&self) -> bool {
        self.has_alpha
    }

    /// Returns true if the image is animated.
    pub fn is_animated(&self) -> bool {
        match &self.kind {
            ImageKind::Lossy | ImageKind::Lossless => false,
            ImageKind::Extended(extended) => extended.animation,
        }
    }

    /// Returns whether the image is lossy. For animated images, this is true if any frame is lossy.
    pub fn is_lossy(&mut self) -> bool {
        self.is_lossy
    }

    /// Returns the number of frames of a single loop of the animation, or zero if the image is not
    /// animated.
    pub fn num_frames(&self) -> u32 {
        self.num_frames
    }

    /// Returns the number of times the animation should loop.
    pub fn loop_count(&self) -> LoopCount {
        self.loop_count
    }

    /// Returns the total duration of one loop through the animation in milliseconds, or zero if the
    /// image is not animated.
    ///
    /// This is the sum of the durations of all individual frames of the image.
    pub fn loop_duration(&self) -> u64 {
        self.loop_duration
    }

    fn read_chunk(
        &mut self,
        chunk: WebPRiffChunk,
        max_size: usize,
    ) -> Result<Option<Vec<u8>>, DecodingError> {
        match self.chunks.get(&chunk) {
            Some(range) => {
                if range.end - range.start > max_size as u64 {
                    return Err(DecodingError::MemoryLimitExceeded);
                }

                self.r.seek(io::SeekFrom::Start(range.start))?;
                let mut data = vec![0; (range.end - range.start) as usize];
                self.r.read_exact(&mut data)?;
                Ok(Some(data))
            }
            None => Ok(None),
        }
    }

    /// Returns the raw bytes of the ICC profile, or None if there is no ICC profile.
    pub fn icc_profile(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::ICCP, self.memory_limit)
    }

    /// Returns the raw bytes of the EXIF metadata, or None if there is no EXIF metadata.
    pub fn exif_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::EXIF, self.memory_limit)
    }

    /// Returns the raw bytes of the XMP metadata, or None if there is no XMP metadata.
    pub fn xmp_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::XMP, self.memory_limit)
    }

    /// Returns the number of bytes required to store the image or a single frame, or None if that
    /// would take more than `usize::MAX` bytes.
    pub fn output_buffer_size(&self) -> Option<usize> {
        let bytes_per_pixel = if self.has_alpha() { 4 } else { 3 };
        (self.width as usize)
            .checked_mul(self.height as usize)?
            .checked_mul(bytes_per_pixel)
    }

    /// Returns the raw bytes of the image. For animated images, this is the first frame.
    ///
    /// Fails with `ImageTooLarge` if `buf` has a length different from `output_buffer_size()`.
    pub fn read_image(&mut self, buf: &mut [u8]) -> Result<(), DecodingError> {
        if Some(buf.len()) != self.output_buffer_size() {
            return Err(DecodingError::ImageTooLarge);
        }

        if self.is_animated() {
            let saved = std::mem::take(&mut self.animation);
            self.animation.next_frame_start =
                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
            let result = self.read_frame(buf);
            self.animation = saved;
            result?;
        } else if let Some(range) = self.chunks.get(&WebPRiffChunk::VP8L) {
            let mut decoder = LosslessDecoder::new(range_reader(&mut self.r, range.clone())?);

            if self.has_alpha {
                decoder.decode_frame(self.width, self.height, false, buf)?;
            } else {
                let mut data = vec![0; self.width as usize * self.height as usize * 4];
                decoder.decode_frame(self.width, self.height, false, &mut data)?;
                for (rgba_val, chunk) in data.chunks_exact(4).zip(buf.chunks_exact_mut(3)) {
                    chunk.copy_from_slice(&rgba_val[..3]);
                }
            }
        } else {
            let range = self
                .chunks
                .get(&WebPRiffChunk::VP8)
                .ok_or(DecodingError::ChunkMissing)?;
            let reader = range_reader(&mut self.r, range.start..range.end)?;
            let frame = Vp8Decoder::decode_frame(reader)?;
            if u32::from(frame.width) != self.width || u32::from(frame.height) != self.height {
                return Err(DecodingError::InconsistentImageSizes);
            }

            if self.has_alpha() {
                frame.fill_rgba(buf);

                let range = self
                    .chunks
                    .get(&WebPRiffChunk::ALPH)
                    .ok_or(DecodingError::ChunkMissing)?
                    .clone();
                let alpha_chunk = read_alpha_chunk(
                    &mut range_reader(&mut self.r, range)?,
                    self.width as u16,
                    self.height as u16,
                )?;

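                // Reconstruct each alpha sample: the ALPH chunk stores deltas that are
                // added (mod 256) to a value predicted from already-decoded neighbours.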
                for y in 0..frame.height {
                    for x in 0..frame.width {
                        let predictor: u8 = get_alpha_predictor(
                            x.into(),
                            y.into(),
                            frame.width.into(),
                            alpha_chunk.filtering_method,
                            buf,
                        );

                        let alpha_index =
                            usize::from(y) * usize::from(frame.width) + usize::from(x);
                        let buffer_index = alpha_index * 4 + 3;

                        buf[buffer_index] = predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }
            } else {
                frame.fill_rgb(buf);
            }
        }

        Ok(())
    }

    /// Reads the next frame of the animation.
    ///
    /// The frame contents are written into `buf` and the method returns the duration of the frame
    /// in milliseconds. If there are no more frames, the method returns
    /// `DecodingError::NoMoreFrames` and `buf` is left unchanged.
    ///
    /// # Panics
    ///
    /// Panics if the image is not animated.
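    ///
    /// # Examples
    ///
    /// A decode-loop sketch, under the same crate-root-export assumption as [`WebPDecoder::new`]
    /// and with a hypothetical `"animation.webp"` file:
    ///
    /// ```no_run
    /// use std::fs::File;
    /// use std::io::BufReader;
    ///
    /// # fn main() -> Result<(), image_webp::DecodingError> {
    /// let reader = BufReader::new(File::open("animation.webp")?);
    /// let mut decoder = image_webp::WebPDecoder::new(reader)?;
    /// let mut frame = vec![0u8; decoder.output_buffer_size().unwrap()];
    /// if decoder.is_animated() {
    ///     for _ in 0..decoder.num_frames() {
    ///         let duration_ms = decoder.read_frame(&mut frame)?;
    ///         // `frame` now holds the composited canvas; `duration_ms` is its display time.
    ///         let _ = duration_ms;
    ///     }
    /// }
    /// # Ok(())
    /// # }
    /// ```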
    pub fn read_frame(&mut self, buf: &mut [u8]) -> Result<u32, DecodingError> {
        assert!(self.is_animated());
        assert_eq!(Some(buf.len()), self.output_buffer_size());

        if self.animation.next_frame == self.num_frames {
            return Err(DecodingError::NoMoreFrames);
        }

        let ImageKind::Extended(info) = &self.kind else {
            unreachable!()
        };

        self.r
            .seek(io::SeekFrom::Start(self.animation.next_frame_start))?;

        let anmf_size = match read_chunk_header(&mut self.r)? {
            (WebPRiffChunk::ANMF, size, _) if size >= 32 => size,
            _ => return Err(DecodingError::ChunkHeaderInvalid(*b"ANMF")),
        };

        // Read ANMF chunk
        let frame_x = extended::read_3_bytes(&mut self.r)? * 2;
        let frame_y = extended::read_3_bytes(&mut self.r)? * 2;
        let frame_width = extended::read_3_bytes(&mut self.r)? + 1;
        let frame_height = extended::read_3_bytes(&mut self.r)? + 1;
        if frame_width > 16384 || frame_height > 16384 {
            return Err(DecodingError::ImageTooLarge);
        }
        if frame_x + frame_width > self.width || frame_y + frame_height > self.height {
            return Err(DecodingError::FrameOutsideImage);
        }
        let duration = extended::read_3_bytes(&mut self.r)?;
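        // The last byte of the frame header holds the flags: bit 1 selects the blending
        // method (0 = alpha blending, 1 = overwrite) and bit 0 the disposal method
        // (1 = dispose to the background color).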
        let frame_info = self.r.read_u8()?;
        let use_alpha_blending = frame_info & 0b00000010 == 0;
        let dispose = frame_info & 0b00000001 != 0;

        let clear_color = if self.animation.dispose_next_frame {
            Some(info.background_color)
        } else {
            None
        };

        // Read normal bitstream now
        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
        if chunk_size_rounded + 24 > anmf_size {
            return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc()));
        }

        let (frame, frame_has_alpha): (Vec<u8>, bool) = match chunk {
            WebPRiffChunk::VP8 => {
                let reader = (&mut self.r).take(chunk_size);
                let raw_frame = Vp8Decoder::decode_frame(reader)?;
                if u32::from(raw_frame.width) != frame_width
                    || u32::from(raw_frame.height) != frame_height
                {
                    return Err(DecodingError::InconsistentImageSizes);
                }
                let mut rgb_frame = vec![0; frame_width as usize * frame_height as usize * 3];
                raw_frame.fill_rgb(&mut rgb_frame);
                (rgb_frame, false)
            }
            WebPRiffChunk::VP8L => {
                let reader = (&mut self.r).take(chunk_size);
                let mut lossless_decoder = LosslessDecoder::new(reader);
                let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4];
                lossless_decoder.decode_frame(frame_width, frame_height, false, &mut rgba_frame)?;
                (rgba_frame, true)
            }
            WebPRiffChunk::ALPH => {
                if chunk_size_rounded + 32 > anmf_size {
                    return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc()));
                }

                // read alpha
                let next_chunk_start = self.r.stream_position()? + chunk_size_rounded;
                let mut reader = (&mut self.r).take(chunk_size);
                let alpha_chunk =
                    read_alpha_chunk(&mut reader, frame_width as u16, frame_height as u16)?;

                // read opaque
                self.r.seek(io::SeekFrom::Start(next_chunk_start))?;
                let (next_chunk, next_chunk_size, _) = read_chunk_header(&mut self.r)?;
                if chunk_size + next_chunk_size + 32 > anmf_size {
                    return Err(DecodingError::ChunkHeaderInvalid(next_chunk.to_fourcc()));
                }

                let frame = Vp8Decoder::decode_frame((&mut self.r).take(next_chunk_size))?;

                let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4];
                frame.fill_rgba(&mut rgba_frame);

                for y in 0..frame.height {
                    for x in 0..frame.width {
                        let predictor: u8 = get_alpha_predictor(
                            x.into(),
                            y.into(),
                            frame.width.into(),
                            alpha_chunk.filtering_method,
                            &rgba_frame,
                        );

                        let alpha_index =
                            usize::from(y) * usize::from(frame.width) + usize::from(x);
                        let buffer_index = alpha_index * 4 + 3;

                        rgba_frame[buffer_index] =
                            predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }

                (rgba_frame, true)
            }
            _ => return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())),
        };

        // fill starting canvas with clear color
        if self.animation.canvas.is_none() {
            self.animation.canvas = {
                let mut canvas = vec![0; (self.width * self.height * 4) as usize];
                canvas
                    .chunks_exact_mut(4)
                    .for_each(|c| c.copy_from_slice(&info.background_color));
                Some(canvas)
            }
        }
        extended::composite_frame(
            self.animation.canvas.as_mut().unwrap(),
            self.width,
            self.height,
            clear_color,
            &frame,
            frame_x,
            frame_y,
            frame_width,
            frame_height,
            frame_has_alpha,
            use_alpha_blending,
            self.animation.previous_frame_width,
            self.animation.previous_frame_height,
            self.animation.previous_frame_x_offset,
            self.animation.previous_frame_y_offset,
        );

        self.animation.previous_frame_width = frame_width;
        self.animation.previous_frame_height = frame_height;
        self.animation.previous_frame_x_offset = frame_x;
        self.animation.previous_frame_y_offset = frame_y;

        self.animation.dispose_next_frame = dispose;
        self.animation.next_frame_start += anmf_size + 8;
        self.animation.next_frame += 1;

        if self.has_alpha() {
            buf.copy_from_slice(self.animation.canvas.as_ref().unwrap());
        } else {
            for (b, c) in buf
                .chunks_exact_mut(3)
                .zip(self.animation.canvas.as_ref().unwrap().chunks_exact(4))
            {
                b.copy_from_slice(&c[..3]);
            }
        }

        Ok(duration)
    }

    /// Resets the animation to the first frame.
    ///
    /// # Panics
    ///
    /// Panics if the image is not animated.
    pub fn reset_animation(&mut self) {
        assert!(self.is_animated());

        self.animation.next_frame = 0;
        self.animation.next_frame_start = self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
        self.animation.dispose_next_frame = true;
    }
}

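/// Seeks `r` to the start of `range` and returns a reader limited to that byte range.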
pub(crate) fn range_reader<R: BufRead + Seek>(
    mut r: R,
    range: Range<u64>,
) -> Result<impl BufRead, DecodingError> {
    r.seek(io::SeekFrom::Start(range.start))?;
    Ok(r.take(range.end - range.start))
}

pub(crate) fn read_fourcc<R: BufRead>(mut r: R) -> Result<WebPRiffChunk, DecodingError> {
    let mut chunk_fourcc: [u8; 4] = [0; 4];
    r.read_exact(&mut chunk_fourcc)?;
    Ok(WebPRiffChunk::from_fourcc(chunk_fourcc))
}

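/// Reads a RIFF chunk header and returns the chunk's fourcc, its payload size, and the size
/// rounded up to the next even number (RIFF chunk payloads are padded to an even length).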
887 | pub(crate) fn read_chunk_header<R: BufRead>( |
888 | mut r: R, |
889 | ) -> Result<(WebPRiffChunk, u64, u64), DecodingError> { |
890 | let chunk: WebPRiffChunk = read_fourcc(&mut r)?; |
891 | let chunk_size: u32 = r.read_u32::<LittleEndian>()?; |
892 | let chunk_size_rounded: u32 = chunk_size.saturating_add(chunk_size & 1); |
893 | Ok((chunk, chunk_size.into(), chunk_size_rounded.into())) |
894 | } |
895 | |
896 | #[cfg (test)] |
897 | mod tests { |
898 | use super::*; |
899 | const RGB_BPP: usize = 3; |
900 | |
901 | #[test ] |
902 | fn add_with_overflow_size() { |
903 | let bytes = vec![ |
904 | 0x52, 0x49, 0x46, 0x46, 0xaf, 0x37, 0x80, 0x47, 0x57, 0x45, 0x42, 0x50, 0x6c, 0x64, |
905 | 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x7e, 0x73, 0x00, 0x06, 0x00, 0x00, 0x00, |
906 | 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, |
907 | 0x40, 0xfb, 0xff, 0xff, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, |
908 | 0x00, 0x00, 0x00, 0x00, 0x62, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x49, |
909 | 0x49, 0x54, 0x55, 0x50, 0x4c, 0x54, 0x59, 0x50, 0x45, 0x33, 0x37, 0x44, 0x4d, 0x46, |
910 | ]; |
911 | |
912 | let data = std::io::Cursor::new(bytes); |
913 | |
914 | let _ = WebPDecoder::new(data); |
915 | } |
916 | |
917 | #[test ] |
918 | fn decode_2x2_single_color_image() { |
919 | // Image data created from imagemagick and output of xxd: |
920 | // $ convert -size 2x2 xc:#f00 red.webp |
921 | // $ xxd -g 1 red.webp | head |
922 | |
923 | const NUM_PIXELS: usize = 2 * 2 * RGB_BPP; |
924 | // 2x2 red pixel image |
925 | let bytes = [ |
926 | 0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50, |
927 | 0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x02, 0x00, |
928 | 0x02, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03, |
929 | 0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff, |
930 | 0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00, |
931 | ]; |
932 | |
933 | let mut data = [0; NUM_PIXELS]; |
934 | let mut decoder = WebPDecoder::new(std::io::Cursor::new(bytes)).unwrap(); |
935 | decoder.read_image(&mut data).unwrap(); |
936 | |
937 | // All pixels are the same value |
938 | let first_pixel = &data[..RGB_BPP]; |
939 | assert!(data.chunks_exact(3).all(|ch| ch.iter().eq(first_pixel))); |
940 | } |
941 | |
942 | #[test ] |
943 | fn decode_3x3_single_color_image() { |
944 | // Test that any odd pixel "tail" is decoded properly |
945 | |
946 | const NUM_PIXELS: usize = 3 * 3 * RGB_BPP; |
947 | // 3x3 red pixel image |
948 | let bytes = [ |
949 | 0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50, |
950 | 0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x03, 0x00, |
951 | 0x03, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03, |
952 | 0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff, |
953 | 0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00, |
954 | ]; |
955 | |
956 | let mut data = [0; NUM_PIXELS]; |
957 | let mut decoder = WebPDecoder::new(std::io::Cursor::new(bytes)).unwrap(); |
958 | decoder.read_image(&mut data).unwrap(); |
959 | |
960 | // All pixels are the same value |
961 | let first_pixel = &data[..RGB_BPP]; |
962 | assert!(data.chunks_exact(3).all(|ch| ch.iter().eq(first_pixel))); |
963 | } |
964 | } |
965 | |