use std::io::{self, Cursor, Error, Read, Seek};
use std::{error, fmt};

use super::decoder::{
    read_chunk, read_fourcc, read_len_cursor, DecoderError::ChunkHeaderInvalid, WebPRiffChunk,
};
use super::lossless::{LosslessDecoder, LosslessFrame};
use super::vp8::{Frame as VP8Frame, Vp8Decoder};
use crate::error::{DecodingError, ParameterError, ParameterErrorKind};
use crate::image::ImageFormat;
use crate::{
    ColorType, Delay, Frame, Frames, ImageError, ImageResult, Rgb, RgbImage, Rgba, RgbaImage,
};
use byteorder::{LittleEndian, ReadBytesExt};

//all errors that can occur while parsing extended chunks in a WebP file
#[derive(Debug, Clone, Copy)]
enum DecoderError {
    // Some bits were invalid
    InfoBitsInvalid { name: &'static str, value: u32 },
    // Alpha chunk doesn't match the frame's size
    AlphaChunkSizeMismatch,
    // Image is too large, either for the platform's pointer size or generally
    ImageTooLarge,
    // Frame would go out of the canvas
    FrameOutsideImage,
}

impl fmt::Display for DecoderError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            DecoderError::InfoBitsInvalid { name, value } => f.write_fmt(format_args!(
                "Info bits `{}` invalid, received value: {}",
                name, value
            )),
            DecoderError::AlphaChunkSizeMismatch => {
                f.write_str("Alpha chunk doesn't match the size of the frame")
            }
            DecoderError::ImageTooLarge => f.write_str("Image is too large to be decoded"),
            DecoderError::FrameOutsideImage => {
                f.write_str("Frame is too large and would go outside the image")
            }
        }
    }
}

impl From<DecoderError> for ImageError {
    fn from(e: DecoderError) -> ImageError {
        ImageError::Decoding(DecodingError::new(ImageFormat::WebP.into(), e))
    }
}

impl error::Error for DecoderError {}

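/// Fields parsed from the `VP8X` chunk header: the feature flags, the canvas
/// dimensions, and (once the `ICCP` chunk has been read) the raw ICC profile bytes.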
#[derive(Debug, Clone)]
pub(crate) struct WebPExtendedInfo {
    _icc_profile: bool,
    _alpha: bool,
    _exif_metadata: bool,
    _xmp_metadata: bool,
    _animation: bool,
    canvas_width: u32,
    canvas_height: u32,
    icc_profile: Option<Vec<u8>>,
}

#[derive(Debug)]
enum ExtendedImageData {
    Animation {
        frames: Vec<Vec<u8>>,
        first_frame: AnimatedFrame,
        anim_info: WebPAnimatedInfo,
    },
    Static(WebPStatic),
}

#[derive(Debug)]
pub(crate) struct ExtendedImage {
    info: WebPExtendedInfo,
    image: ExtendedImageData,
}

impl ExtendedImage {
    pub(crate) fn dimensions(&self) -> (u32, u32) {
        (self.info.canvas_width, self.info.canvas_height)
    }

    pub(crate) fn has_animation(&self) -> bool {
        self.info._animation
    }

    pub(crate) fn icc_profile(&self) -> Option<Vec<u8>> {
        self.info.icc_profile.clone()
    }

    pub(crate) fn color_type(&self) -> ColorType {
        match &self.image {
            ExtendedImageData::Animation { first_frame, .. } => &first_frame.image,
            ExtendedImageData::Static(image) => image,
        }
        .color_type()
    }

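    /// Converts the image into an iterator over its animation frames.
    ///
    /// Each `ANMF` payload is decoded lazily and composited onto a persistent
    /// canvas, so every yielded `Frame` is a full canvas-sized image. For
    /// non-animated images the iterator is empty.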
    pub(crate) fn into_frames<'a>(self) -> Frames<'a> {
        struct FrameIterator {
            image: ExtendedImage,
            index: usize,
            canvas: RgbaImage,
        }

        impl Iterator for FrameIterator {
            type Item = ImageResult<Frame>;

            fn next(&mut self) -> Option<Self::Item> {
                if let ExtendedImageData::Animation {
                    frames,
                    anim_info,
                    first_frame,
                } = &self.image.image
                {
                    let anim_frame_data = frames.get(self.index)?;
                    let anim_frame;
                    let frame;

                    if self.index == 0 {
                        // Use already decoded first frame
                        anim_frame = first_frame;
                    } else {
                        frame = read_anim_frame(
                            &mut Cursor::new(anim_frame_data),
                            self.image.info.canvas_width,
                            self.image.info.canvas_height,
                        )
                        .ok()?;
                        anim_frame = &frame;
                    };

                    self.index += 1;
                    ExtendedImage::draw_subimage(
                        &mut self.canvas,
                        anim_frame,
                        anim_info.background_color,
                    )
                } else {
                    None
                }
            }
        }

        let width = self.info.canvas_width;
        let height = self.info.canvas_height;
        let background_color =
            if let ExtendedImageData::Animation { ref anim_info, .. } = self.image {
                anim_info.background_color
            } else {
                Rgba([0, 0, 0, 0])
            };

        let frame_iter = FrameIterator {
            image: self,
            index: 0,
            canvas: RgbaImage::from_pixel(width, height, background_color),
        };

        Frames::new(Box::new(frame_iter))
    }

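    /// Reads the chunks that follow the `VP8X` header and assembles them into
    /// either a static image (`VP8`/`VP8L`, optionally preceded by `ALPH`) or an
    /// animation (`ANIM` plus one or more `ANMF` chunks). `ICCP` payloads are
    /// stored on `info`; `EXIF` and `XMP` chunks are skipped.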
    pub(crate) fn read_extended_chunks<R: Read>(
        reader: &mut R,
        mut info: WebPExtendedInfo,
    ) -> ImageResult<ExtendedImage> {
        let mut anim_info: Option<WebPAnimatedInfo> = None;
        let mut anim_frames: Vec<Vec<u8>> = Vec::new();
        let mut anim_first_frame: Option<AnimatedFrame> = None;
        let mut static_frame: Option<WebPStatic> = None;
        //go until end of file and while chunk headers are valid
        while let Some((mut cursor, chunk)) = read_extended_chunk(reader)? {
            match chunk {
                WebPRiffChunk::EXIF | WebPRiffChunk::XMP => {
                    //ignore these chunks
                }
                WebPRiffChunk::ANIM => {
                    if anim_info.is_none() {
                        anim_info = Some(Self::read_anim_info(&mut cursor)?);
                    }
                }
                WebPRiffChunk::ANMF => {
                    let mut frame_data = Vec::new();

                    // Store first frame decoded to avoid decoding it for certain function calls
                    if anim_first_frame.is_none() {
                        anim_first_frame = Some(read_anim_frame(
                            &mut cursor,
                            info.canvas_width,
                            info.canvas_height,
                        )?);

                        cursor.rewind().unwrap();
                    }

                    cursor.read_to_end(&mut frame_data)?;
                    anim_frames.push(frame_data);
                }
                WebPRiffChunk::ALPH => {
                    if static_frame.is_none() {
                        let alpha_chunk =
                            read_alpha_chunk(&mut cursor, info.canvas_width, info.canvas_height)?;

                        let vp8_frame = read_lossy_with_chunk(reader)?;

                        let img = WebPStatic::from_alpha_lossy(alpha_chunk, vp8_frame)?;

                        static_frame = Some(img);
                    }
                }
                WebPRiffChunk::ICCP => {
                    let mut icc_profile = Vec::new();
                    cursor.read_to_end(&mut icc_profile)?;
                    info.icc_profile = Some(icc_profile);
                }
                WebPRiffChunk::VP8 => {
                    if static_frame.is_none() {
                        let vp8_frame = read_lossy(cursor)?;

                        let img = WebPStatic::from_lossy(vp8_frame)?;

                        static_frame = Some(img);
                    }
                }
                WebPRiffChunk::VP8L => {
                    if static_frame.is_none() {
                        let mut lossless_decoder = LosslessDecoder::new(cursor);
                        let frame = lossless_decoder.decode_frame()?;
                        let image = WebPStatic::Lossless(frame.clone());

                        static_frame = Some(image);
                    }
                }
                _ => return Err(ChunkHeaderInvalid(chunk.to_fourcc()).into()),
            }
        }

        let image = if let (Some(anim_info), Some(first_frame)) = (anim_info, anim_first_frame) {
            ExtendedImageData::Animation {
                frames: anim_frames,
                first_frame,
                anim_info,
            }
        } else if let Some(frame) = static_frame {
            ExtendedImageData::Static(frame)
        } else {
            //reached end of file too early before image data was reached
            return Err(ImageError::IoError(Error::from(
                io::ErrorKind::UnexpectedEof,
            )));
        };

        let image = ExtendedImage { image, info };

        Ok(image)
    }

    fn read_anim_info<R: Read>(reader: &mut R) -> ImageResult<WebPAnimatedInfo> {
        let mut colors: [u8; 4] = [0; 4];
        reader.read_exact(&mut colors)?;

        //background color is [blue, green, red, alpha]
        let background_color = Rgba([colors[2], colors[1], colors[0], colors[3]]);

        let loop_count = reader.read_u16::<LittleEndian>()?;

        let info = WebPAnimatedInfo {
            background_color,
            _loop_count: loop_count,
        };

        Ok(info)
    }

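    /// Composites a single animation frame onto `canvas` at its offsets, either by
    /// alpha blending or by overwriting, and returns the updated canvas as a `Frame`.
    /// If the frame's dispose flag is set, the covered region is afterwards cleared
    /// to `background_color` so the next frame starts from a disposed canvas.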
    fn draw_subimage(
        canvas: &mut RgbaImage,
        anim_image: &AnimatedFrame,
        background_color: Rgba<u8>,
    ) -> Option<ImageResult<Frame>> {
        let mut buffer = vec![0; anim_image.image.get_buf_size()];
        anim_image.image.fill_buf(&mut buffer);
        let has_alpha = anim_image.image.has_alpha();
        let pixel_len: u32 = anim_image.image.color_type().bytes_per_pixel().into();

        'x: for x in 0..anim_image.width {
            for y in 0..anim_image.height {
                let canvas_index: (u32, u32) = (x + anim_image.offset_x, y + anim_image.offset_y);
                // Negative offsets are not possible due to unsigned ints
                // If we go out of bounds by height, still continue by x
                if canvas_index.1 >= canvas.height() {
                    continue 'x;
                }
                // If we go out of bounds by width, it doesn't make sense to continue at all
                if canvas_index.0 >= canvas.width() {
                    break 'x;
                }
                let index: usize = ((y * anim_image.width + x) * pixel_len).try_into().unwrap();
                canvas[canvas_index] = if anim_image.use_alpha_blending && has_alpha {
                    let buffer: [u8; 4] = buffer[index..][..4].try_into().unwrap();
                    ExtendedImage::do_alpha_blending(buffer, canvas[canvas_index])
                } else {
                    Rgba([
                        buffer[index],
                        buffer[index + 1],
                        buffer[index + 2],
                        if has_alpha { buffer[index + 3] } else { 255 },
                    ])
                };
            }
        }

        let delay = Delay::from_numer_denom_ms(anim_image.duration, 1);
        let img = canvas.clone();
        let frame = Frame::from_parts(img, 0, 0, delay);

        if anim_image.dispose {
            for x in 0..anim_image.width {
                for y in 0..anim_image.height {
                    let canvas_index = (x + anim_image.offset_x, y + anim_image.offset_y);
                    canvas[canvas_index] = background_color;
                }
            }
        }

        Some(Ok(frame))
    }

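    /// Blends one RGBA pixel over another ("over" compositing with alpha kept in
    /// the 0..=255 range): `a_out = a_src + a_dst * (1 - a_src / 255)` and
    /// `c_out = (c_src * a_src + c_dst * a_dst * (1 - a_src / 255)) / a_out`,
    /// falling back to transparent black when the blended alpha is zero.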
    fn do_alpha_blending(buffer: [u8; 4], canvas: Rgba<u8>) -> Rgba<u8> {
        let canvas_alpha = f64::from(canvas[3]);
        let buffer_alpha = f64::from(buffer[3]);
        let blend_alpha_f64 = buffer_alpha + canvas_alpha * (1.0 - buffer_alpha / 255.0);
        //value should be between 0 and 255, this truncates the fractional part
        let blend_alpha: u8 = blend_alpha_f64 as u8;

        let blend_rgb: [u8; 3] = if blend_alpha == 0 {
            [0, 0, 0]
        } else {
            let mut rgb = [0u8; 3];
            for i in 0..3 {
                let canvas_f64 = f64::from(canvas[i]);
                let buffer_f64 = f64::from(buffer[i]);

                let val = (buffer_f64 * buffer_alpha
                    + canvas_f64 * canvas_alpha * (1.0 - buffer_alpha / 255.0))
                    / blend_alpha_f64;
                //value should be between 0 and 255, this truncates the fractional part
                rgb[i] = val as u8;
            }

            rgb
        };

        Rgba([blend_rgb[0], blend_rgb[1], blend_rgb[2], blend_alpha])
    }

    pub(crate) fn fill_buf(&self, buf: &mut [u8]) {
        match &self.image {
            // will always have at least one frame
            ExtendedImageData::Animation {
                anim_info,
                first_frame,
                ..
            } => {
                let (canvas_width, canvas_height) = self.dimensions();
                if canvas_width == first_frame.width && canvas_height == first_frame.height {
                    first_frame.image.fill_buf(buf);
                } else {
                    let bg_color = match &self.info._alpha {
                        true => Rgba::from([0, 0, 0, 0]),
                        false => anim_info.background_color,
                    };
                    let mut canvas = RgbaImage::from_pixel(canvas_width, canvas_height, bg_color);
                    let _ = ExtendedImage::draw_subimage(&mut canvas, first_frame, bg_color)
                        .unwrap()
                        .unwrap();
                    buf.copy_from_slice(canvas.into_raw().as_slice());
                }
            }
            ExtendedImageData::Static(image) => {
                image.fill_buf(buf);
            }
        }
    }

    pub(crate) fn get_buf_size(&self) -> usize {
        match &self.image {
            // will always have at least one frame
            ExtendedImageData::Animation { first_frame, .. } => &first_frame.image,
            ExtendedImageData::Static(image) => image,
        }
        .get_buf_size()
    }

    pub(crate) fn set_background_color(&mut self, color: Rgba<u8>) -> ImageResult<()> {
        match &mut self.image {
            ExtendedImageData::Animation { anim_info, .. } => {
                anim_info.background_color = color;
                Ok(())
            }
            _ => Err(ImageError::Parameter(ParameterError::from_kind(
                ParameterErrorKind::Generic(
                    "Background color can only be set on animated webp".to_owned(),
                ),
            ))),
        }
    }
}

#[derive(Debug)]
enum WebPStatic {
    LossyWithAlpha(RgbaImage),
    LossyWithoutAlpha(RgbImage),
    Lossless(LosslessFrame),
}

impl WebPStatic {
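    /// Combines a decoded `ALPH` chunk with a lossy `VP8` frame into an RGBA image.
    /// Alpha values are reconstructed by adding each stored value to the predictor
    /// chosen by the chunk's filtering method (modulo 256) and writing the result
    /// into the alpha channel of the RGBA buffer.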
    pub(crate) fn from_alpha_lossy(
        alpha: AlphaChunk,
        vp8_frame: VP8Frame,
    ) -> ImageResult<WebPStatic> {
        if alpha.data.len() != usize::from(vp8_frame.width) * usize::from(vp8_frame.height) {
            return Err(DecoderError::AlphaChunkSizeMismatch.into());
        }

        let size = usize::from(vp8_frame.width).checked_mul(usize::from(vp8_frame.height) * 4);
        let mut image_vec = match size {
            Some(size) => vec![0u8; size],
            None => return Err(DecoderError::ImageTooLarge.into()),
        };

        vp8_frame.fill_rgba(&mut image_vec);

        for y in 0..vp8_frame.height {
            for x in 0..vp8_frame.width {
                let predictor: u8 = WebPStatic::get_predictor(
                    x.into(),
                    y.into(),
                    vp8_frame.width.into(),
                    alpha.filtering_method,
                    &image_vec,
                );
                let predictor = u16::from(predictor);

                let alpha_index = usize::from(y) * usize::from(vp8_frame.width) + usize::from(x);
                let alpha_val = alpha.data[alpha_index];
                let alpha: u8 = ((predictor + u16::from(alpha_val)) % 256)
                    .try_into()
                    .unwrap();

                let alpha_index = alpha_index * 4 + 3;
                image_vec[alpha_index] = alpha;
            }
        }

        let image = RgbaImage::from_vec(vp8_frame.width.into(), vp8_frame.height.into(), image_vec)
            .unwrap();

        Ok(WebPStatic::LossyWithAlpha(image))
    }

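    /// Returns the alpha filtering predictor for the pixel at `(x, y)`: the
    /// previously reconstructed alpha value to the left (horizontal), above
    /// (vertical), or a clamped gradient of the left, top and top-left values,
    /// with the first row and column falling back to whichever neighbour exists.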
    fn get_predictor(
        x: usize,
        y: usize,
        width: usize,
        filtering_method: FilteringMethod,
        image_slice: &[u8],
    ) -> u8 {
        match filtering_method {
            FilteringMethod::None => 0,
            FilteringMethod::Horizontal => {
                if x == 0 && y == 0 {
                    0
                } else if x == 0 {
                    let index = (y - 1) * width + x;
                    image_slice[index * 4 + 3]
                } else {
                    let index = y * width + x - 1;
                    image_slice[index * 4 + 3]
                }
            }
            FilteringMethod::Vertical => {
                if x == 0 && y == 0 {
                    0
                } else if y == 0 {
                    let index = y * width + x - 1;
                    image_slice[index * 4 + 3]
                } else {
                    let index = (y - 1) * width + x;
                    image_slice[index * 4 + 3]
                }
            }
            FilteringMethod::Gradient => {
                let (left, top, top_left) = match (x, y) {
                    (0, 0) => (0, 0, 0),
                    (0, y) => {
                        let above_index = (y - 1) * width + x;
                        let val = image_slice[above_index * 4 + 3];
                        (val, val, val)
                    }
                    (x, 0) => {
                        let before_index = y * width + x - 1;
                        let val = image_slice[before_index * 4 + 3];
                        (val, val, val)
                    }
                    (x, y) => {
                        let left_index = y * width + x - 1;
                        let left = image_slice[left_index * 4 + 3];
                        let top_index = (y - 1) * width + x;
                        let top = image_slice[top_index * 4 + 3];
                        let top_left_index = (y - 1) * width + x - 1;
                        let top_left = image_slice[top_left_index * 4 + 3];

                        (left, top, top_left)
                    }
                };

                let combination = i16::from(left) + i16::from(top) - i16::from(top_left);
                i16::clamp(combination, 0, 255).try_into().unwrap()
            }
        }
    }

    pub(crate) fn from_lossy(vp8_frame: VP8Frame) -> ImageResult<WebPStatic> {
        let mut image = RgbImage::from_pixel(
            vp8_frame.width.into(),
            vp8_frame.height.into(),
            Rgb([0, 0, 0]),
        );

        vp8_frame.fill_rgb(&mut image);

        Ok(WebPStatic::LossyWithoutAlpha(image))
    }

    pub(crate) fn fill_buf(&self, buf: &mut [u8]) {
        match self {
            WebPStatic::LossyWithAlpha(image) => {
                buf.copy_from_slice(image);
            }
            WebPStatic::LossyWithoutAlpha(image) => {
                buf.copy_from_slice(image);
            }
            WebPStatic::Lossless(lossless) => {
                lossless.fill_rgba(buf);
            }
        }
    }

    pub(crate) fn get_buf_size(&self) -> usize {
        match self {
            WebPStatic::LossyWithAlpha(rgba_image) => rgba_image.len(),
            WebPStatic::LossyWithoutAlpha(rgb_image) => rgb_image.len(),
            WebPStatic::Lossless(lossless) => lossless.get_buf_size(),
        }
    }

    pub(crate) fn color_type(&self) -> ColorType {
        if self.has_alpha() {
            ColorType::Rgba8
        } else {
            ColorType::Rgb8
        }
    }

    pub(crate) fn has_alpha(&self) -> bool {
        match self {
            Self::LossyWithAlpha(..) | Self::Lossless(..) => true,
            Self::LossyWithoutAlpha(..) => false,
        }
    }
}

#[derive(Debug)]
struct WebPAnimatedInfo {
    background_color: Rgba<u8>,
    _loop_count: u16,
}

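/// A single `ANMF` frame: its placement on the canvas, its display duration in
/// milliseconds, its blending and disposal flags, and the decoded image data.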
#[derive(Debug)]
struct AnimatedFrame {
    offset_x: u32,
    offset_y: u32,
    width: u32,
    height: u32,
    duration: u32,
    use_alpha_blending: bool,
    dispose: bool,
    image: WebPStatic,
}

/// Reads a chunk, but silently ignores unknown chunks at the end of a file
fn read_extended_chunk<R>(r: &mut R) -> ImageResult<Option<(Cursor<Vec<u8>>, WebPRiffChunk)>>
where
    R: Read,
{
    let mut unknown_chunk: Result<(), ImageError> = Ok(());

    while let Some(chunk) = read_fourcc(r)? {
        let cursor = read_len_cursor(r)?;
        match chunk {
            Ok(chunk) => return unknown_chunk.and(Ok(Some((cursor, chunk)))),
            Err(err) => unknown_chunk = unknown_chunk.and(Err(err)),
        }
    }

    Ok(None)
}

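/// Parses the payload of the `VP8X` chunk: one byte of feature flags (reserved,
/// ICC profile, alpha, EXIF, XMP, animation, reserved), 24 reserved bits, and the
/// 24-bit canvas width and height, each stored as `value - 1`.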
pub(crate) fn read_extended_header<R: Read>(reader: &mut R) -> ImageResult<WebPExtendedInfo> {
    let chunk_flags = reader.read_u8()?;

    let reserved_first = chunk_flags & 0b11000000;
    let icc_profile = chunk_flags & 0b00100000 != 0;
    let alpha = chunk_flags & 0b00010000 != 0;
    let exif_metadata = chunk_flags & 0b00001000 != 0;
    let xmp_metadata = chunk_flags & 0b00000100 != 0;
    let animation = chunk_flags & 0b00000010 != 0;
    let reserved_second = chunk_flags & 0b00000001;

    let reserved_third = read_3_bytes(reader)?;

    if reserved_first != 0 || reserved_second != 0 || reserved_third != 0 {
        let value: u32 = if reserved_first != 0 {
            reserved_first.into()
        } else if reserved_second != 0 {
            reserved_second.into()
        } else {
            reserved_third
        };
        return Err(DecoderError::InfoBitsInvalid {
            name: "reserved",
            value,
        }
        .into());
    }

    let canvas_width = read_3_bytes(reader)? + 1;
    let canvas_height = read_3_bytes(reader)? + 1;

    //product of canvas dimensions cannot be larger than u32 max
    if u32::checked_mul(canvas_width, canvas_height).is_none() {
        return Err(DecoderError::ImageTooLarge.into());
    }

    let info = WebPExtendedInfo {
        _icc_profile: icc_profile,
        _alpha: alpha,
        _exif_metadata: exif_metadata,
        _xmp_metadata: xmp_metadata,
        _animation: animation,
        canvas_width,
        canvas_height,
        icc_profile: None,
    };

    Ok(info)
}

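/// Parses a single `ANMF` chunk: 24-bit frame X and Y offsets (stored halved),
/// 24-bit width and height minus one, a 24-bit duration, a flags byte holding the
/// blending and disposal bits, and finally the frame's embedded image bitstream.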
fn read_anim_frame<R: Read>(
    mut reader: R,
    canvas_width: u32,
    canvas_height: u32,
) -> ImageResult<AnimatedFrame> {
    //offsets for the frames are twice the stored values
    let frame_x = read_3_bytes(&mut reader)? * 2;
    let frame_y = read_3_bytes(&mut reader)? * 2;

    let frame_width = read_3_bytes(&mut reader)? + 1;
    let frame_height = read_3_bytes(&mut reader)? + 1;

    if frame_x + frame_width > canvas_width || frame_y + frame_height > canvas_height {
        return Err(DecoderError::FrameOutsideImage.into());
    }

    let duration = read_3_bytes(&mut reader)?;

    let frame_info = reader.read_u8()?;
    let reserved = frame_info & 0b11111100;
    if reserved != 0 {
        return Err(DecoderError::InfoBitsInvalid {
            name: "reserved",
            value: reserved.into(),
        }
        .into());
    }
    let use_alpha_blending = frame_info & 0b00000010 == 0;
    let dispose = frame_info & 0b00000001 != 0;

    //read normal bitstream now
    let static_image = read_image(&mut reader, frame_width, frame_height)?;

    let frame = AnimatedFrame {
        offset_x: frame_x,
        offset_y: frame_y,
        width: frame_width,
        height: frame_height,
        duration,
        use_alpha_blending,
        dispose,
        image: static_image,
    };

    Ok(frame)
}

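/// Reads a 24-bit little-endian unsigned integer into a `u32`.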
fn read_3_bytes<R: Read>(reader: &mut R) -> ImageResult<u32> {
    let mut buffer: [u8; 3] = [0; 3];
    reader.read_exact(&mut buffer)?;
    let value: u32 =
        (u32::from(buffer[2]) << 16) | (u32::from(buffer[1]) << 8) | u32::from(buffer[0]);
    Ok(value)
}

fn read_lossy_with_chunk<R: Read>(reader: &mut R) -> ImageResult<VP8Frame> {
    let (cursor, chunk) =
        read_chunk(reader)?.ok_or_else(|| Error::from(io::ErrorKind::UnexpectedEof))?;

    if chunk != WebPRiffChunk::VP8 {
        return Err(ChunkHeaderInvalid(chunk.to_fourcc()).into());
    }

    read_lossy(cursor)
}

fn read_lossy(cursor: Cursor<Vec<u8>>) -> ImageResult<VP8Frame> {
    let mut vp8_decoder = Vp8Decoder::new(cursor);
    let frame = vp8_decoder.decode_frame()?;

    Ok(frame.clone())
}

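/// Reads the image bitstream embedded in an `ANMF` chunk: a lossy `VP8` frame, a
/// lossless `VP8L` frame, or an `ALPH` chunk followed by its `VP8` frame.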
fn read_image<R: Read>(reader: &mut R, width: u32, height: u32) -> ImageResult<WebPStatic> {
    let chunk = read_chunk(reader)?;

    match chunk {
        Some((cursor, WebPRiffChunk::VP8)) => {
            let mut vp8_decoder = Vp8Decoder::new(cursor);
            let frame = vp8_decoder.decode_frame()?;

            let img = WebPStatic::from_lossy(frame.clone())?;

            Ok(img)
        }
        Some((cursor, WebPRiffChunk::VP8L)) => {
            let mut lossless_decoder = LosslessDecoder::new(cursor);
            let frame = lossless_decoder.decode_frame()?;

            let img = WebPStatic::Lossless(frame.clone());

            Ok(img)
        }
        Some((mut cursor, WebPRiffChunk::ALPH)) => {
            let alpha_chunk = read_alpha_chunk(&mut cursor, width, height)?;

            let vp8_frame = read_lossy_with_chunk(reader)?;

            let img = WebPStatic::from_alpha_lossy(alpha_chunk, vp8_frame)?;

            Ok(img)
        }
        None => Err(ImageError::IoError(Error::from(
            io::ErrorKind::UnexpectedEof,
        ))),
        Some((_, chunk)) => Err(ChunkHeaderInvalid(chunk.to_fourcc()).into()),
    }
}

#[derive(Debug)]
struct AlphaChunk {
    _preprocessing: bool,
    filtering_method: FilteringMethod,
    data: Vec<u8>,
}

#[derive(Debug, Copy, Clone)]
enum FilteringMethod {
    None,
    Horizontal,
    Vertical,
    Gradient,
}

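/// Parses an `ALPH` chunk: a header byte holding the reserved, preprocessing,
/// filtering and compression fields (two bits each), followed by the alpha data,
/// which is either stored raw or compressed as the green channel of a lossless
/// VP8L stream.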
fn read_alpha_chunk<R: Read>(reader: &mut R, width: u32, height: u32) -> ImageResult<AlphaChunk> {
    let info_byte = reader.read_u8()?;

    let reserved = info_byte & 0b11000000;
    let preprocessing = (info_byte & 0b00110000) >> 4;
    let filtering = (info_byte & 0b00001100) >> 2;
    let compression = info_byte & 0b00000011;

    if reserved != 0 {
        return Err(DecoderError::InfoBitsInvalid {
            name: "reserved",
            value: reserved.into(),
        }
        .into());
    }

    let preprocessing = match preprocessing {
        0 => false,
        1 => true,
        _ => {
            return Err(DecoderError::InfoBitsInvalid {
                name: "preprocessing",
                value: preprocessing.into(),
            }
            .into())
        }
    };

    let filtering_method = match filtering {
        0 => FilteringMethod::None,
        1 => FilteringMethod::Horizontal,
        2 => FilteringMethod::Vertical,
        3 => FilteringMethod::Gradient,
        _ => unreachable!(),
    };

    let lossless_compression = match compression {
        0 => false,
        1 => true,
        _ => {
            return Err(DecoderError::InfoBitsInvalid {
                name: "lossless compression",
                value: compression.into(),
            }
            .into())
        }
    };

    let mut framedata = Vec::new();
    reader.read_to_end(&mut framedata)?;

    let data = if lossless_compression {
        let cursor = io::Cursor::new(framedata);

        let mut decoder = LosslessDecoder::new(cursor);
        //this is a potential problem for large images; would require rewriting lossless decoder to use u32 for width and height
        let width: u16 = width
            .try_into()
            .map_err(|_| ImageError::from(DecoderError::ImageTooLarge))?;
        let height: u16 = height
            .try_into()
            .map_err(|_| ImageError::from(DecoderError::ImageTooLarge))?;
        let frame = decoder.decode_frame_implicit_dims(width, height)?;

        let mut data = vec![0u8; usize::from(width) * usize::from(height)];

        frame.fill_green(&mut data);

        data
    } else {
        framedata
    };

    let chunk = AlphaChunk {
        _preprocessing: preprocessing,
        filtering_method,
        data,
    };

    Ok(chunk)
}