// Copyright (C) 2019 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qwebphandler_p.h"
#include "webp/mux.h"
#include "webp/encode.h"
#include <qcolor.h>
#include <qimage.h>
#include <qdebug.h>
#include <qpainter.h>
#include <qvariant.h>
#include <QtEndian>

static const int riffHeaderSize = 12; // RIFF_HEADER_SIZE from webp/format_constants.h

QWebpHandler::QWebpHandler() :
    m_quality(75),
    m_scanState(ScanNotScanned),
    m_features(),
    m_formatFlags(0),
    m_loop(0),
    m_frameCount(0),
    m_demuxer(NULL),
    m_composited(NULL)
{
    memset(&m_iter, 0, sizeof(m_iter));
}

QWebpHandler::~QWebpHandler()
{
    WebPDemuxReleaseIterator(&m_iter);
    WebPDemuxDelete(m_demuxer);
    delete m_composited;
}

bool QWebpHandler::canRead() const
{
    if (m_scanState == ScanNotScanned && !canRead(device()))
        return false;

    if (m_scanState != ScanError) {
        setFormat(QByteArrayLiteral("webp"));

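        // For animations, report end-of-stream once the iterator has delivered every frame.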
        if (m_features.has_animation && m_iter.frame_num >= m_frameCount)
            return false;

        return true;
    }
    return false;
}

bool QWebpHandler::canRead(QIODevice *device)
{
    if (!device) {
        qWarning("QWebpHandler::canRead() called with no device");
        return false;
    }

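    // A WebP file is a RIFF container: bytes 0-3 are "RIFF" and bytes 8-11 are "WEBP".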
    QByteArray header = device->peek(riffHeaderSize);
    return header.startsWith("RIFF") && header.endsWith("WEBP");
}

bool QWebpHandler::ensureScanned() const
{
    if (m_scanState != ScanNotScanned)
        return m_scanState == ScanSuccess;

    m_scanState = ScanError;

    QWebpHandler *that = const_cast<QWebpHandler *>(this);
    const int headerBytesNeeded = sizeof(WebPBitstreamFeatures);
    QByteArray header = device()->peek(headerBytesNeeded);
    if (header.size() < headerBytesNeeded)
        return false;

    // We do no random access during decoding, just a readAll() of the whole image file. So if
    // it is all available already, we can accept a sequential device. The RIFF header contains
    // the file size minus the 8-byte header.
    qint64 byteSize = qFromLittleEndian<quint32>(header.constData() + 4);
    if (device()->isSequential() && device()->bytesAvailable() < byteSize + 8) {
        qWarning() << "QWebpHandler: Insufficient data available in sequential device";
        return false;
    }
    if (WebPGetFeatures((const uint8_t*)header.constData(), header.size(), &(that->m_features)) == VP8_STATUS_OK) {
        if (m_features.has_animation) {
            // For animation, we have to read and scan whole file to determine loop count and images count
            if (that->ensureDemuxer()) {
                that->m_loop = WebPDemuxGetI(m_demuxer, WEBP_FF_LOOP_COUNT);
                that->m_frameCount = WebPDemuxGetI(m_demuxer, WEBP_FF_FRAME_COUNT);
                that->m_bgColor = QColor::fromRgba(QRgb(WebPDemuxGetI(m_demuxer, WEBP_FF_BACKGROUND_COLOR)));

                QSize sz(that->m_features.width, that->m_features.height);
                that->m_composited = new QImage;
                if (!QImageIOHandler::allocateImage(sz, QImage::Format_ARGB32, that->m_composited))
                    return false;
                if (that->m_features.has_alpha)
                    that->m_composited->fill(Qt::transparent);

                m_scanState = ScanSuccess;
            }
        } else {
            m_scanState = ScanSuccess;
        }
    }

    return m_scanState == ScanSuccess;
}

bool QWebpHandler::ensureDemuxer()
{
    if (m_demuxer)
        return true;

    m_rawData = device()->readAll();
    m_webpData.bytes = reinterpret_cast<const uint8_t *>(m_rawData.constData());
    m_webpData.size = m_rawData.size();

    m_demuxer = WebPDemux(&m_webpData);
    if (m_demuxer == NULL)
        return false;

    m_formatFlags = WebPDemuxGetI(m_demuxer, WEBP_FF_FORMAT_FLAGS);
    return true;
}

bool QWebpHandler::read(QImage *image)
{
    if (!ensureScanned() || !ensureDemuxer())
        return false;

    QRect prevFrameRect;
    if (m_iter.frame_num == 0) {
        // Read global meta-data chunks first
        WebPChunkIterator metaDataIter;
        if ((m_formatFlags & ICCP_FLAG) && WebPDemuxGetChunk(m_demuxer, "ICCP", 1, &metaDataIter)) {
            QByteArray iccProfile = QByteArray::fromRawData(reinterpret_cast<const char *>(metaDataIter.chunk.bytes),
                                                            metaDataIter.chunk.size);
            // Ensure the profile is 4-byte aligned.
            if (reinterpret_cast<qintptr>(iccProfile.constData()) & 0x3)
                iccProfile.detach();
            m_colorSpace = QColorSpace::fromIccProfile(iccProfile);
            // ### consider parsing EXIF and/or XMP metadata too.
            WebPDemuxReleaseChunkIterator(&metaDataIter);
        }

        // Go to first frame
        if (!WebPDemuxGetFrame(m_demuxer, 1, &m_iter))
            return false;
    } else {
        if (m_iter.has_alpha && m_iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND)
            prevFrameRect = currentImageRect();

        // Go to next frame
        if (!WebPDemuxNextFrame(&m_iter))
            return false;
    }

    WebPBitstreamFeatures features;
    VP8StatusCode status = WebPGetFeatures(m_iter.fragment.bytes, m_iter.fragment.size, &features);
    if (status != VP8_STATUS_OK)
        return false;

    QImage::Format format = m_features.has_alpha ? QImage::Format_ARGB32 : QImage::Format_RGB32;
    QImage frame;
    if (!QImageIOHandler::allocateImage(QSize(m_iter.width, m_iter.height), format, &frame))
        return false;
    uint8_t *output = frame.bits();
    size_t output_size = frame.sizeInBytes();
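    // libwebp emits pixels in memory byte order, so decode to BGRA on little-endian and ARGB on
    // big-endian hosts to match the in-memory layout of QImage's 32-bit formats.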
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    if (!WebPDecodeBGRAInto(
        reinterpret_cast<const uint8_t*>(m_iter.fragment.bytes), m_iter.fragment.size,
        output, output_size, frame.bytesPerLine()))
#else
    if (!WebPDecodeARGBInto(
        reinterpret_cast<const uint8_t*>(m_iter.fragment.bytes), m_iter.fragment.size,
        output, output_size, frame.bytesPerLine()))
#endif
        return false;

    if (!m_features.has_animation) {
        // Single image
        *image = frame;
    } else {
        // Animation
        QPainter painter(m_composited);
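        // A frame with dispose method WEBP_MUX_DISPOSE_BACKGROUND asked for its area to be
        // cleared before the next frame is composited.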
        if (!prevFrameRect.isEmpty()) {
            painter.setCompositionMode(QPainter::CompositionMode_Clear);
            painter.fillRect(prevFrameRect, Qt::black);
        }
        if (m_features.has_alpha) {
            if (m_iter.blend_method == WEBP_MUX_NO_BLEND)
                painter.setCompositionMode(QPainter::CompositionMode_Source);
            else
                painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
        }
        painter.drawImage(currentImageRect(), frame);

        *image = *m_composited;
    }
    image->setColorSpace(m_colorSpace);

    return true;
}

bool QWebpHandler::write(const QImage &image)
{
    if (image.isNull()) {
        qWarning() << "source image is null.";
        return false;
    }
    if (std::max(image.width(), image.height()) > WEBP_MAX_DIMENSION) {
        qWarning() << "QWebpHandler::write() source image too large for WebP: " << image.size();
        return false;
    }

    const bool alpha = image.hasAlphaChannel();
    QImage::Format newFormat = alpha ? QImage::Format_RGBA8888 : QImage::Format_RGB888;
    const QImage srcImage = (image.format() == newFormat) ? image : image.convertedTo(newFormat);

    WebPPicture picture;
    WebPConfig config;

    if (!WebPPictureInit(&picture) || !WebPConfigInit(&config)) {
        qWarning() << "failed to init webp picture and config";
        return false;
    }

    picture.width = srcImage.width();
    picture.height = srcImage.height();
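    // Keep the picture in ARGB form; libwebp uses it directly for lossless encoding and converts
    // it internally when encoding lossy.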
    picture.use_argb = 1;
    bool failed = false;
    if (alpha)
        failed = !WebPPictureImportRGBA(&picture, srcImage.constBits(), srcImage.bytesPerLine());
    else
        failed = !WebPPictureImportRGB(&picture, srcImage.constBits(), srcImage.bytesPerLine());

    if (failed) {
        qWarning() << "failed to import image data to webp picture.";
        WebPPictureFree(&picture);
        return false;
    }

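    // Map Qt's quality setting: -1 falls back to the default of 75, values below 100 select
    // lossy encoding with that quality, and 100 selects lossless.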
    int reqQuality = m_quality < 0 ? 75 : qMin(m_quality, 100);
    if (reqQuality < 100) {
        config.lossless = 0;
        config.quality = reqQuality;
    } else {
        config.lossless = 1;
        config.quality = 70; // For lossless, specifies compression effort; 70 is libwebp default
    }
    config.alpha_quality = config.quality;
    WebPMemoryWriter writer;
    WebPMemoryWriterInit(&writer);
    picture.writer = WebPMemoryWrite;
    picture.custom_ptr = &writer;

    if (!WebPEncode(&config, &picture)) {
        qWarning() << "failed to encode webp picture, error code: " << picture.error_code;
        WebPPictureFree(&picture);
        WebPMemoryWriterClear(&writer);
        return false;
    }

    bool res = false;
    if (image.colorSpace().isValid()) {
        int copy_data = 0;
        WebPMux *mux = WebPMuxNew();
        WebPData image_data = { writer.mem, writer.size };
        WebPMuxSetImage(mux, &image_data, copy_data);
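        // Build a VP8X chunk by hand: one flags byte, three reserved bytes, then canvas width-1
        // and height-1 stored as 24-bit little-endian values.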
        uint8_t vp8xChunk[10];
        uint8_t flags = 0x20; // Has ICCP chunk, no XMP, EXIF or animation.
        if (image.hasAlphaChannel())
            flags |= 0x10;
        vp8xChunk[0] = flags;
        vp8xChunk[1] = 0;
        vp8xChunk[2] = 0;
        vp8xChunk[3] = 0;
        const unsigned width = image.width() - 1;
        const unsigned height = image.height() - 1;
        vp8xChunk[4] = width & 0xff;
        vp8xChunk[5] = (width >> 8) & 0xff;
        vp8xChunk[6] = (width >> 16) & 0xff;
        vp8xChunk[7] = height & 0xff;
        vp8xChunk[8] = (height >> 8) & 0xff;
        vp8xChunk[9] = (height >> 16) & 0xff;
        WebPData vp8x_data = { vp8xChunk, 10 };
        if (WebPMuxSetChunk(mux, "VP8X", &vp8x_data, copy_data) == WEBP_MUX_OK) {
            QByteArray iccProfile = image.colorSpace().iccProfile();
            WebPData iccp_data = {
                reinterpret_cast<const uint8_t *>(iccProfile.constData()),
                static_cast<size_t>(iccProfile.size())
            };
            if (WebPMuxSetChunk(mux, "ICCP", &iccp_data, copy_data) == WEBP_MUX_OK) {
                WebPData output_data;
                if (WebPMuxAssemble(mux, &output_data) == WEBP_MUX_OK) {
                    res = (output_data.size ==
                           static_cast<size_t>(device()->write(reinterpret_cast<const char *>(output_data.bytes), output_data.size)));
                }
                WebPDataClear(&output_data);
            }
        }
        WebPMuxDelete(mux);
    }
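    // Without a valid color space to embed (or if muxing failed), write the plain encoded bitstream.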
    if (!res) {
        res = (writer.size ==
               static_cast<size_t>(device()->write(reinterpret_cast<const char *>(writer.mem), writer.size)));
    }
    WebPPictureFree(&picture);
    WebPMemoryWriterClear(&writer);

    return res;
}

QVariant QWebpHandler::option(ImageOption option) const
{
    if (!supportsOption(option) || !ensureScanned())
        return QVariant();

    switch (option) {
    case Quality:
        return m_quality;
    case Size:
        return QSize(m_features.width, m_features.height);
    case Animation:
        return m_features.has_animation;
    case BackgroundColor:
        return m_bgColor;
    default:
        return QVariant();
    }
}

void QWebpHandler::setOption(ImageOption option, const QVariant &value)
{
    switch (option) {
    case Quality:
        m_quality = value.toInt();
        return;
    default:
        break;
    }
    QImageIOHandler::setOption(option, value);
}

bool QWebpHandler::supportsOption(ImageOption option) const
{
    return option == Quality
        || option == Size
        || option == Animation
        || option == BackgroundColor;
}

int QWebpHandler::imageCount() const
{
    if (!ensureScanned())
        return 0;

    if (!m_features.has_animation)
        return 1;

    return m_frameCount;
}

int QWebpHandler::currentImageNumber() const
{
    if (!ensureScanned() || !m_features.has_animation)
        return 0;

    // Frame number in WebP starts from 1
    return m_iter.frame_num - 1;
}

QRect QWebpHandler::currentImageRect() const
{
    if (!ensureScanned())
        return QRect();

    return QRect(m_iter.x_offset, m_iter.y_offset, m_iter.width, m_iter.height);
}

int QWebpHandler::loopCount() const
{
    if (!ensureScanned() || !m_features.has_animation)
        return 0;

    // Loop count in WebP starts from 0
    return m_loop - 1;
}

int QWebpHandler::nextImageDelay() const
{
    if (!ensureScanned() || !m_features.has_animation)
        return 0;

    return m_iter.duration;
}