// Copyright (C) 2019 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qwebphandler_p.h"
#include "webp/mux.h"
#include "webp/encode.h"
#include <qcolor.h>
#include <qimage.h>
#include <qdebug.h>
#include <qpainter.h>
#include <qvariant.h>
#include <QtEndian>

static const int riffHeaderSize = 12; // RIFF_HEADER_SIZE from webp/format_constants.h

QWebpHandler::QWebpHandler() :
    m_quality(75),
    m_scanState(ScanNotScanned),
    m_features(),
    m_formatFlags(0),
    m_loop(0),
    m_frameCount(0),
    m_demuxer(NULL),
    m_composited(NULL)
{
    memset(&m_iter, 0, sizeof(m_iter));
}

QWebpHandler::~QWebpHandler()
{
    WebPDemuxReleaseIterator(&m_iter);
    WebPDemuxDelete(m_demuxer);
    delete m_composited;
}

bool QWebpHandler::canRead() const
{
    if (m_scanState == ScanNotScanned && !canRead(device()))
        return false;

    if (m_scanState != ScanError) {
        setFormat(QByteArrayLiteral("webp"));

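        // For animations, stop reporting readable once the last frame has been read.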
        if (m_features.has_animation && m_iter.frame_num >= m_frameCount)
            return false;

        return true;
    }
    return false;
}

bool QWebpHandler::canRead(QIODevice *device)
{
    if (!device) {
        qWarning("QWebpHandler::canRead() called with no device");
        return false;
    }

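    // A WebP file begins with a 12-byte RIFF header: "RIFF", a 4-byte chunk size,
    // and the form type "WEBP".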
    QByteArray header = device->peek(riffHeaderSize);
    return header.startsWith("RIFF") && header.endsWith("WEBP");
}

bool QWebpHandler::ensureScanned() const
{
    if (m_scanState != ScanNotScanned)
        return m_scanState == ScanSuccess;

    m_scanState = ScanError;

    QWebpHandler *that = const_cast<QWebpHandler *>(this);
    const int headerBytesNeeded = sizeof(WebPBitstreamFeatures);
    QByteArray header = device()->peek(headerBytesNeeded);
    if (header.size() < headerBytesNeeded)
        return false;

    // We do no random access during decoding, just a readAll() of the whole image file, so if
    // it is all available already, we can accept a sequential device. The RIFF header contains
    // the file size minus the 8-byte header.
    qint64 byteSize = qFromLittleEndian<quint32>(header.constData() + 4);
    if (device()->isSequential() && device()->bytesAvailable() < byteSize + 8) {
        qWarning() << "QWebpHandler: Insufficient data available in sequential device";
        return false;
    }
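    // Parse the basic bitstream features (dimensions, alpha, animation) from the peeked header.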
    if (WebPGetFeatures((const uint8_t *)header.constData(), header.size(), &(that->m_features)) == VP8_STATUS_OK) {
        if (m_features.has_animation) {
            // For animation, we have to read and scan the whole file to determine the loop and frame counts
            if (that->ensureDemuxer()) {
                that->m_loop = WebPDemuxGetI(m_demuxer, WEBP_FF_LOOP_COUNT);
                that->m_frameCount = WebPDemuxGetI(m_demuxer, WEBP_FF_FRAME_COUNT);
                that->m_bgColor = QColor::fromRgba(QRgb(WebPDemuxGetI(m_demuxer, WEBP_FF_BACKGROUND_COLOR)));

                QSize sz(that->m_features.width, that->m_features.height);
                that->m_composited = new QImage;
                if (!QImageIOHandler::allocateImage(sz, QImage::Format_ARGB32, that->m_composited))
                    return false;
                if (that->m_features.has_alpha)
                    that->m_composited->fill(Qt::transparent);

                m_scanState = ScanSuccess;
            }
        } else {
            m_scanState = ScanSuccess;
        }
    }

    return m_scanState == ScanSuccess;
}

bool QWebpHandler::ensureDemuxer()
{
    if (m_demuxer)
        return true;

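    // WebPDemux() does not copy the input, so buffer the whole file in members that
    // stay alive for as long as the demuxer is used.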
    m_rawData = device()->readAll();
    m_webpData.bytes = reinterpret_cast<const uint8_t *>(m_rawData.constData());
    m_webpData.size = m_rawData.size();

    m_demuxer = WebPDemux(&m_webpData);
    if (m_demuxer == NULL)
        return false;

    m_formatFlags = WebPDemuxGetI(m_demuxer, WEBP_FF_FORMAT_FLAGS);
    return true;
}

bool QWebpHandler::read(QImage *image)
{
    if (!ensureScanned() || !ensureDemuxer())
        return false;

    QRect prevFrameRect;
    if (m_iter.frame_num == 0) {
        // Read global meta-data chunks first
        WebPChunkIterator metaDataIter;
        if ((m_formatFlags & ICCP_FLAG) && WebPDemuxGetChunk(m_demuxer, "ICCP", 1, &metaDataIter)) {
            QByteArray iccProfile = QByteArray::fromRawData(reinterpret_cast<const char *>(metaDataIter.chunk.bytes),
                                                            metaDataIter.chunk.size);
            // Ensure the profile is 4-byte aligned.
            if (reinterpret_cast<qintptr>(iccProfile.constData()) & 0x3)
                iccProfile.detach();
            m_colorSpace = QColorSpace::fromIccProfile(iccProfile);
            // ### consider parsing EXIF and/or XMP metadata too.
            WebPDemuxReleaseChunkIterator(&metaDataIter);
        }

        // Go to first frame
        if (!WebPDemuxGetFrame(m_demuxer, 1, &m_iter))
            return false;
    } else {
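        // m_iter still describes the frame just shown; if it has alpha and requested
        // dispose-to-background, remember its rect so it can be cleared below.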
        if (m_iter.has_alpha && m_iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND)
            prevFrameRect = currentImageRect();

        // Go to next frame
        if (!WebPDemuxNextFrame(&m_iter))
            return false;
    }

    WebPBitstreamFeatures features;
    VP8StatusCode status = WebPGetFeatures(m_iter.fragment.bytes, m_iter.fragment.size, &features);
    if (status != VP8_STATUS_OK)
        return false;

    QImage::Format format = m_features.has_alpha ? QImage::Format_ARGB32 : QImage::Format_RGB32;
    QImage frame;
    if (!QImageIOHandler::allocateImage(QSize(m_iter.width, m_iter.height), format, &frame))
        return false;
    uint8_t *output = frame.bits();
    size_t output_size = frame.sizeInBytes();
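    // QImage::Format_(A)RGB32 stores each pixel as a 32-bit value in host byte order,
    // so decode as BGRA on little-endian hosts and as ARGB on big-endian ones.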
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    if (!WebPDecodeBGRAInto(
        reinterpret_cast<const uint8_t*>(m_iter.fragment.bytes), m_iter.fragment.size,
        output, output_size, frame.bytesPerLine()))
#else
    if (!WebPDecodeARGBInto(
        reinterpret_cast<const uint8_t*>(m_iter.fragment.bytes), m_iter.fragment.size,
        output, output_size, frame.bytesPerLine()))
#endif
        return false;

    if (!m_features.has_animation) {
        // Single image
        *image = frame;
    } else {
        // Animation
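        // A frame may cover only part of the canvas: clear any area left over from a
        // dispose-to-background frame, then draw this frame according to its blend method.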
        QPainter painter(m_composited);
        if (!prevFrameRect.isEmpty()) {
            painter.setCompositionMode(QPainter::CompositionMode_Clear);
            painter.fillRect(prevFrameRect, Qt::black);
        }
        if (m_features.has_alpha) {
            if (m_iter.blend_method == WEBP_MUX_NO_BLEND)
                painter.setCompositionMode(QPainter::CompositionMode_Source);
            else
                painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
        }
        painter.drawImage(currentImageRect(), frame);

        *image = *m_composited;
    }
    image->setColorSpace(m_colorSpace);

    return true;
}

bool QWebpHandler::write(const QImage &image)
{
    if (image.isNull()) {
        qWarning() << "source image is null.";
        return false;
    }
    if (std::max(image.width(), image.height()) > WEBP_MAX_DIMENSION) {
        qWarning() << "QWebpHandler::write() source image too large for WebP: " << image.size();
        return false;
    }

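    // libwebp imports tightly packed RGB(A) data, so convert the source image to a
    // matching format first if necessary.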
    QImage srcImage = image;
    bool alpha = srcImage.hasAlphaChannel();
    QImage::Format newFormat = alpha ? QImage::Format_RGBA8888 : QImage::Format_RGB888;
    if (srcImage.format() != newFormat)
        srcImage = srcImage.convertToFormat(newFormat);

    WebPPicture picture;
    WebPConfig config;

    if (!WebPPictureInit(&picture) || !WebPConfigInit(&config)) {
        qWarning() << "failed to init webp picture and config";
        return false;
    }

    picture.width = srcImage.width();
    picture.height = srcImage.height();
    picture.use_argb = 1;
    bool failed = false;
    if (alpha)
        failed = !WebPPictureImportRGBA(&picture, srcImage.bits(), srcImage.bytesPerLine());
    else
        failed = !WebPPictureImportRGB(&picture, srcImage.bits(), srcImage.bytesPerLine());

    if (failed) {
        qWarning() << "failed to import image data to webp picture.";
        WebPPictureFree(&picture);
        return false;
    }

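    // Map Qt's quality setting: a negative value means the default (75), 0-99 selects
    // lossy encoding at that quality, and 100 selects lossless.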
    int reqQuality = m_quality < 0 ? 75 : qMin(m_quality, 100);
    if (reqQuality < 100) {
        config.lossless = 0;
        config.quality = reqQuality;
    } else {
        config.lossless = 1;
        config.quality = 70; // For lossless, specifies compression effort; 70 is libwebp default
    }
    config.alpha_quality = config.quality;
    WebPMemoryWriter writer;
    WebPMemoryWriterInit(&writer);
    picture.writer = WebPMemoryWrite;
    picture.custom_ptr = &writer;

    if (!WebPEncode(&config, &picture)) {
        qWarning() << "failed to encode webp picture, error code: " << picture.error_code;
        WebPPictureFree(&picture);
        WebPMemoryWriterClear(&writer);
        return false;
    }

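    // If the image carries a color space, wrap the encoded bitstream in an extended
    // (VP8X) container with an ICCP chunk. Otherwise, or if muxing fails, the plain
    // bitstream produced above is written as-is below.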
    bool res = false;
    if (image.colorSpace().isValid()) {
        int copy_data = 0;
        WebPMux *mux = WebPMuxNew();
        WebPData image_data = { writer.mem, writer.size };
        WebPMuxSetImage(mux, &image_data, copy_data);
        uint8_t vp8xChunk[10];
        uint8_t flags = 0x20; // Has ICCP chunk, no XMP, EXIF or animation.
        if (image.hasAlphaChannel())
            flags |= 0x10;
        vp8xChunk[0] = flags;
        vp8xChunk[1] = 0;
        vp8xChunk[2] = 0;
        vp8xChunk[3] = 0;
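        // The VP8X chunk stores the canvas size as (width - 1) and (height - 1),
        // each as a 24-bit little-endian value.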
        const unsigned width = image.width() - 1;
        const unsigned height = image.height() - 1;
        vp8xChunk[4] = width & 0xff;
        vp8xChunk[5] = (width >> 8) & 0xff;
        vp8xChunk[6] = (width >> 16) & 0xff;
        vp8xChunk[7] = height & 0xff;
        vp8xChunk[8] = (height >> 8) & 0xff;
        vp8xChunk[9] = (height >> 16) & 0xff;
        WebPData vp8x_data = { vp8xChunk, 10 };
        if (WebPMuxSetChunk(mux, "VP8X", &vp8x_data, copy_data) == WEBP_MUX_OK) {
            QByteArray iccProfile = image.colorSpace().iccProfile();
            WebPData iccp_data = {
                reinterpret_cast<const uint8_t *>(iccProfile.constData()),
                static_cast<size_t>(iccProfile.size())
            };
            if (WebPMuxSetChunk(mux, "ICCP", &iccp_data, copy_data) == WEBP_MUX_OK) {
                WebPData output_data;
                if (WebPMuxAssemble(mux, &output_data) == WEBP_MUX_OK) {
                    res = (output_data.size ==
                           static_cast<size_t>(device()->write(reinterpret_cast<const char *>(output_data.bytes), output_data.size)));
                }
                WebPDataClear(&output_data);
            }
        }
        WebPMuxDelete(mux);
    }
    if (!res) {
        res = (writer.size ==
               static_cast<size_t>(device()->write(reinterpret_cast<const char *>(writer.mem), writer.size)));
    }
    WebPPictureFree(&picture);
    WebPMemoryWriterClear(&writer);

    return res;
}

QVariant QWebpHandler::option(ImageOption option) const
{
    if (!supportsOption(option) || !ensureScanned())
        return QVariant();

    switch (option) {
    case Quality:
        return m_quality;
    case Size:
        return QSize(m_features.width, m_features.height);
    case Animation:
        return m_features.has_animation;
    case BackgroundColor:
        return m_bgColor;
    default:
        return QVariant();
    }
}

void QWebpHandler::setOption(ImageOption option, const QVariant &value)
{
    switch (option) {
    case Quality:
        m_quality = value.toInt();
        return;
    default:
        break;
    }
    QImageIOHandler::setOption(option, value);
}

bool QWebpHandler::supportsOption(ImageOption option) const
{
    return option == Quality
        || option == Size
        || option == Animation
        || option == BackgroundColor;
}

int QWebpHandler::imageCount() const
{
    if (!ensureScanned())
        return 0;

    if (!m_features.has_animation)
        return 1;

    return m_frameCount;
}

int QWebpHandler::currentImageNumber() const
{
    if (!ensureScanned() || !m_features.has_animation)
        return 0;

    // Frame number in WebP starts from 1
    return m_iter.frame_num - 1;
}

QRect QWebpHandler::currentImageRect() const
{
    if (!ensureScanned())
        return QRect();

    return QRect(m_iter.x_offset, m_iter.y_offset, m_iter.width, m_iter.height);
}

int QWebpHandler::loopCount() const
{
    if (!ensureScanned() || !m_features.has_animation)
        return 0;

    // Loop count in WebP starts from 0
    return m_loop - 1;
}

int QWebpHandler::nextImageDelay() const
{
    if (!ensureScanned() || !m_features.has_animation)
        return 0;

    return m_iter.duration;
}