// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qvideoframeformat.h"
#include "qvideotexturehelper_p.h"
#include "qvideotransformation_p.h"

#include <qdebug.h>
#include <qlist.h>
#include <qmetatype.h>
#include <qvariant.h>
#include <qmatrix4x4.h>

static void initResource() {
    Q_INIT_RESOURCE(qtmultimedia_shaders);
}

QT_BEGIN_NAMESPACE

class QVideoFrameFormatPrivate : public QSharedData
{
public:
    QVideoFrameFormatPrivate() = default;

    QVideoFrameFormatPrivate(
            const QSize &size,
            QVideoFrameFormat::PixelFormat format)
        : pixelFormat(format)
        , frameSize(size)
        , viewport(QPoint(0, 0), size)
    {
    }

    bool operator ==(const QVideoFrameFormatPrivate &other) const
    {
        if (pixelFormat == other.pixelFormat && scanLineDirection == other.scanLineDirection
            && frameSize == other.frameSize && viewport == other.viewport
            && frameRatesEqual(frameRate, other.frameRate) && colorSpace == other.colorSpace
            && transformation == other.transformation)
            return true;

        return false;
    }

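    // Compare frame rates with a small relative tolerance instead of exact equality,
    // so that rates derived from slightly different computations (for example 29.97
    // and 30000.0 / 1001.0) still compare equal.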
    inline static bool frameRatesEqual(qreal r1, qreal r2)
    {
        return qAbs(r1 - r2) <= 0.00001 * qMin(qAbs(r1), qAbs(r2));
    }

    QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;
    QVideoFrameFormat::Direction scanLineDirection = QVideoFrameFormat::TopToBottom;
    QSize frameSize;
    QVideoFrameFormat::ColorSpace colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
    QVideoFrameFormat::ColorTransfer colorTransfer = QVideoFrameFormat::ColorTransfer_Unknown;
    QVideoFrameFormat::ColorRange colorRange = QVideoFrameFormat::ColorRange_Unknown;
    QRect viewport;
    float frameRate = 0.0;
    float maxLuminance = -1.;
    VideoTransformation transformation;
};

QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFrameFormatPrivate);

/*!
    \class QVideoFrameFormat
    \brief The QVideoFrameFormat class specifies the stream format of a video presentation
    surface.
    \inmodule QtMultimedia

    \ingroup multimedia
    \ingroup multimedia_video

    A video sink presents a stream of video frames. QVideoFrameFormat describes the type of
    the frames and determines how they should be presented.

    The core properties of a video stream required to set up a video sink are the pixel format
    given by pixelFormat(), and the frame dimensions given by frameSize().

    The region of a frame that is actually displayed on a video surface is given by the viewport().
    A stream may use a viewport smaller than the entire frame area to allow for frames that are
    padded beyond the visible video size. For example, the width of a frame may be extended so
    that the start of each scan line is eight-byte aligned.

    Other common properties are scanLineDirection(), streamFrameRate(), and colorSpace().
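
    For example, a 1280x720 NV12 stream at 30 frames per second could be described with
    a minimal sketch like the following (all values are illustrative):

    \code
    QVideoFrameFormat format(QSize(1280, 720), QVideoFrameFormat::Format_NV12);
    format.setStreamFrameRate(30);
    format.setColorSpace(QVideoFrameFormat::ColorSpace_BT709);

    QVideoFrame frame(format); // create a frame matching the format, then map, fill and present it
    \endcode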
*/

/*!
    \enum QVideoFrameFormat::PixelFormat

    Enumerates video data types.

    \value Format_Invalid
    The frame is invalid.

    \value Format_ARGB8888
    The frame is stored using an ARGB format with 8 bits per component.

    \value Format_ARGB8888_Premultiplied
    The frame is stored using a premultiplied ARGB format with 8 bits per component.

    \value Format_XRGB8888
    The frame is stored using a 32-bit per pixel RGB format (0xff, R, G, B).

    \value Format_BGRA8888
    The frame is stored using a 32-bit BGRA format (0xBBGGRRAA).

    \value Format_BGRA8888_Premultiplied
    The frame is stored using a premultiplied 32-bit BGRA format.

    \value Format_ABGR8888
    The frame is stored using a 32-bit ABGR format (0xAABBGGRR).

    \value Format_XBGR8888
    The frame is stored using a 32-bit BGR format (0xffBBGGRR).

    \value Format_RGBA8888
    The frame is stored in memory as the bytes R, G, B, A/X, with R at the lowest address and A/X at the highest address.

    \value Format_BGRX8888
    The frame is stored using a 32-bit BGRx format, [31:0] B:G:R:x 8:8:8:8 little endian.

    \value Format_RGBX8888
    The frame is stored in memory as the bytes R, G, B, A/X, with R at the lowest address and A/X at the highest address.

    \value Format_AYUV
    The frame is stored using a packed 32-bit AYUV format (0xAAYYUUVV).

    \value Format_AYUV_Premultiplied
    The frame is stored using a packed premultiplied 32-bit AYUV format (0xAAYYUUVV).

    \value Format_YUV420P
    The frame is stored using an 8-bit per component planar YUV format with the U and V planes
    horizontally and vertically sub-sampled, i.e. the height and width of the U and V planes are
    half that of the Y plane.

    \value Format_YUV422P
    The frame is stored using an 8-bit per component planar YUV format with the U and V planes
    horizontally sub-sampled, i.e. the width of the U and V planes is half that of the Y plane,
    and the height of the U and V planes is the same as that of the Y plane.

    \value Format_YV12
    The frame is stored using an 8-bit per component planar YVU format with the V and U planes
    horizontally and vertically sub-sampled, i.e. the height and width of the V and U planes are
    half that of the Y plane.

    \value Format_UYVY
    The frame is stored using an 8-bit per component packed YUV format with the U and V planes
    horizontally sub-sampled (U-Y-V-Y), i.e. two horizontally adjacent pixels are stored as a 32-bit
    macropixel which has a Y value for each pixel and common U and V values.

    \value Format_YUYV
    The frame is stored using an 8-bit per component packed YUV format with the U and V planes
    horizontally sub-sampled (Y-U-Y-V), i.e. two horizontally adjacent pixels are stored as a 32-bit
    macropixel which has a Y value for each pixel and common U and V values.

    \value Format_NV12
    The frame is stored using an 8-bit per component semi-planar YUV format with a Y plane (Y)
    followed by a horizontally and vertically sub-sampled, packed UV plane (U-V).

    \value Format_NV21
    The frame is stored using an 8-bit per component semi-planar YUV format with a Y plane (Y)
    followed by a horizontally and vertically sub-sampled, packed VU plane (V-U).

    \value Format_IMC1
    The frame is stored using an 8-bit per component planar YUV format with the U and V planes
    horizontally and vertically sub-sampled. This is similar to the Format_YUV420P type, except
    that the bytes per line of the U and V planes are padded out to the same stride as the Y plane.

    \value Format_IMC2
    The frame is stored using an 8-bit per component planar YUV format with the U and V planes
    horizontally and vertically sub-sampled. This is similar to the Format_YUV420P type, except
    that the lines of the U and V planes are interleaved, i.e. each line of U data is followed by a
    line of V data creating a single line of the same stride as the Y data.

    \value Format_IMC3
    The frame is stored using an 8-bit per component planar YVU format with the V and U planes
    horizontally and vertically sub-sampled. This is similar to the Format_YV12 type, except that
    the bytes per line of the V and U planes are padded out to the same stride as the Y plane.

    \value Format_IMC4
    The frame is stored using an 8-bit per component planar YVU format with the V and U planes
    horizontally and vertically sub-sampled. This is similar to the Format_YV12 type, except that
    the lines of the V and U planes are interleaved, i.e. each line of V data is followed by a line
    of U data creating a single line of the same stride as the Y data.

    \value Format_P010
    The frame is stored using a 16-bit per component semi-planar YUV format with a Y plane (Y)
    followed by a horizontally and vertically sub-sampled, packed UV plane (U-V). Only the 10 most
    significant bits of each component are being used.

    \value Format_P016
    The frame is stored using a 16-bit per component semi-planar YUV format with a Y plane (Y)
    followed by a horizontally and vertically sub-sampled, packed UV plane (U-V).

    \value Format_Y8
    The frame is stored using an 8-bit greyscale format.

    \value Format_Y16
    The frame is stored using a 16-bit linear greyscale format. Little endian.

    \value Format_Jpeg
    The frame is stored in compressed Jpeg format.

    \value Format_SamplerExternalOES
    The frame is stored in external OES texture format. This is currently only being used on Android.

    \value Format_SamplerRect
    The frame is stored in rectangle texture format (GL_TEXTURE_RECTANGLE). This is only being used on
    macOS with an OpenGL based Rendering Hardware Interface. The underlying pixel format stored in the
    texture is Format_BGRA8888.

    \value Format_YUV420P10
    Similar to Format_YUV420P, but uses 16 bits per component, of which 10 are significant.
*/

/*!
    \enum QVideoFrameFormat::Direction

    Enumerates the layout direction of video scan lines.

    \value TopToBottom Scan lines are arranged from the top of the frame to the bottom.
    \value BottomToTop Scan lines are arranged from the bottom of the frame to the top.
*/

/*!
    \enum QVideoFrameFormat::YCbCrColorSpace

    \deprecated Use QVideoFrameFormat::ColorSpace instead.

    Enumerates the Y'CbCr color space of video frames.

    \value YCbCr_Undefined
    No color space is specified.

    \value YCbCr_BT601
    A Y'CbCr color space defined by ITU-R recommendation BT.601
    with Y value range from 16 to 235, and Cb/Cr range from 16 to 240.
    Used mostly by older videos that were targeting CRT displays.

    \value YCbCr_BT709
    A Y'CbCr color space defined by ITU-R BT.709 with the same value range as YCbCr_BT601.
    The most commonly used color space today.

    \value YCbCr_xvYCC601
    This value is deprecated. Please check the \l ColorRange instead.
    The BT.601 color space with the value range extended to 0 to 255.
    It is backward compatible with BT.601 and uses values outside BT.601 range to represent a
    wider range of colors.

    \value YCbCr_xvYCC709
    This value is deprecated. Please check the \l ColorRange instead.
    The BT.709 color space with the value range extended to 0 to 255.

    \value YCbCr_JPEG
    The full range Y'CbCr color space used in most JPEG files.

    \value YCbCr_BT2020
    The color space defined by ITU-R BT.2020. Used mainly for HDR videos.
*/


/*!
    \enum QVideoFrameFormat::ColorSpace

    Enumerates the color space of video frames.

    \value ColorSpace_Undefined
    No color space is specified.

    \value ColorSpace_BT601
    A color space defined by ITU-R recommendation BT.601
    with Y value range from 16 to 235, and Cb/Cr range from 16 to 240.
    Used mostly by older videos that were targeting CRT displays.

    \value ColorSpace_BT709
    A color space defined by ITU-R BT.709 with the same value range as ColorSpace_BT601.
    The most commonly used color space today.

    \value ColorSpace_AdobeRgb
    The full range YUV color space used in most JPEG files.

    \value ColorSpace_BT2020
    The color space defined by ITU-R BT.2020. Used mainly for HDR videos.
*/

/*!
    \enum QVideoFrameFormat::ColorTransfer

    \value ColorTransfer_Unknown
    The color transfer function is unknown.

    \value ColorTransfer_BT709
    Color values are encoded according to BT709. See also https://www.itu.int/rec/R-REC-BT.709/en.
    This is close to, but not identical to a gamma curve of 2.2, and the same transfer curve as is
    used in sRGB.

    \value ColorTransfer_BT601
    Color values are encoded according to BT601. See also https://www.itu.int/rec/R-REC-BT.601/en.

    \value ColorTransfer_Linear
    Color values are linear.

    \value ColorTransfer_Gamma22
    Color values are encoded with a gamma of 2.2.

    \value ColorTransfer_Gamma28
    Color values are encoded with a gamma of 2.8.

    \value ColorTransfer_ST2084
    Color values are encoded using SMPTE ST 2084. This transfer function is the most common HDR
    transfer function and often called the 'perceptual quantizer'. See also https://www.itu.int/rec/R-REC-BT.2100
    and https://en.wikipedia.org/wiki/Perceptual_quantizer.

    \value ColorTransfer_STD_B67
    Color values are encoded using ARIB STD B67. This transfer function is also often referred to as 'hybrid log gamma'.
    See also https://www.itu.int/rec/R-REC-BT.2100 and https://en.wikipedia.org/wiki/Hybrid_log–gamma.
*/

/*!
    \enum QVideoFrameFormat::ColorRange

    Describes the color range used by the video data. Video data usually comes in either full
    color range, where all values are being used, or a more limited range traditionally used in
    YUV video formats, where a subset of all values is being used.

    \value ColorRange_Unknown
    The color range of the video is unknown.

    \value ColorRange_Video
    The color range traditionally used by most YUV video formats. For 8-bit formats, the Y component is
    limited to values between 16 and 235. The U and V components are limited to values between 16 and 240.

    For higher bit depths, multiply these values by 2^(depth-8). For example, for 10-bit formats the
    Y component ranges from 64 to 940, and the U and V components from 64 to 960.

    \value ColorRange_Full
    Full color range. All values from 0 to 2^depth - 1 are valid.
*/

/*!
    Constructs a null video stream format.
*/
QVideoFrameFormat::QVideoFrameFormat()
    : d(new QVideoFrameFormatPrivate)
{
    initResource();
}

/*!
    Constructs a video stream with the given frame \a size and pixel \a format.
*/
QVideoFrameFormat::QVideoFrameFormat(
        const QSize& size, QVideoFrameFormat::PixelFormat format)
    : d(new QVideoFrameFormatPrivate(size, format))
{
}

/*!
    Constructs a copy of \a other.
*/
QVideoFrameFormat::QVideoFrameFormat(const QVideoFrameFormat &other) = default;

/*!
    \fn QVideoFrameFormat::QVideoFrameFormat(QVideoFrameFormat &&other)

    Constructs a QVideoFrameFormat by moving from \a other.
*/

/*!
    \fn void QVideoFrameFormat::swap(QVideoFrameFormat &other) noexcept

    Swaps the current video frame format with the \a other.
*/

/*!
    Assigns the values of \a other to this object.
*/
QVideoFrameFormat &QVideoFrameFormat::operator =(const QVideoFrameFormat &other) = default;

/*!
    \fn QVideoFrameFormat &QVideoFrameFormat::operator =(QVideoFrameFormat &&other)

    Moves \a other into this QVideoFrameFormat.
*/

/*!
    Destroys a video stream description.
*/
QVideoFrameFormat::~QVideoFrameFormat() = default;

/*!
    Identifies if a video surface format has a valid pixel format and frame size.

    Returns true if the format is valid, and false otherwise.
*/
bool QVideoFrameFormat::isValid() const
{
    return d->pixelFormat != Format_Invalid && d->frameSize.isValid();
}

/*!
    Returns true if \a other is the same as this video format, and false if they are different.
*/
bool QVideoFrameFormat::operator ==(const QVideoFrameFormat &other) const
{
    return d == other.d || *d == *other.d;
}

/*!
    Returns true if \a other is different to this video format, and false if they are the same.
*/
bool QVideoFrameFormat::operator !=(const QVideoFrameFormat &other) const
{
    return d != other.d && !(*d == *other.d);
}

/*!
    \internal
*/
void QVideoFrameFormat::detach()
{
    d.detach();
}

/*!
    Returns the pixel format of frames in a video stream.
*/
QVideoFrameFormat::PixelFormat QVideoFrameFormat::pixelFormat() const
{
    return d->pixelFormat;
}

/*!
    Returns the dimensions of frames in a video stream.

    \sa frameWidth(), frameHeight()
*/
QSize QVideoFrameFormat::frameSize() const
{
    return d->frameSize;
}

/*!
    Returns the width of frames in a video stream.

    \sa frameSize(), frameHeight()
*/
int QVideoFrameFormat::frameWidth() const
{
    return d->frameSize.width();
}

/*!
    Returns the height of frames in a video stream.
*/
int QVideoFrameFormat::frameHeight() const
{
    return d->frameSize.height();
}

/*!
    Returns the number of planes used by the pixel format.
    This number depends on the pixel format: it is
    1 for RGB-based formats, and between 1 and 3 for
    YUV-based formats.
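
    For example, a semi-planar format such as Format_NV12 reports two planes
    (a minimal illustrative sketch):

    \code
    QVideoFrameFormat format(QSize(1920, 1080), QVideoFrameFormat::Format_NV12);
    int planes = format.planeCount(); // 2: a Y plane followed by an interleaved UV plane
    \endcode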
*/
int QVideoFrameFormat::planeCount() const
{
    return QVideoTextureHelper::textureDescription(d->pixelFormat)->nplanes;
}

/*!
    Sets the size of frames in a video stream to \a size.

    This will reset the viewport() to fill the entire frame.
*/
void QVideoFrameFormat::setFrameSize(const QSize &size)
{
    detach();
    d->frameSize = size;
    d->viewport = QRect(QPoint(0, 0), size);
}

/*!
    \overload

    Sets the \a width and \a height of frames in a video stream.

    This will reset the viewport() to fill the entire frame.
*/
void QVideoFrameFormat::setFrameSize(int width, int height)
{
    detach();
    d->frameSize = QSize(width, height);
    d->viewport = QRect(0, 0, width, height);
}

/*!
    Returns the viewport of a video stream.

    The viewport is the region of a video frame that is actually displayed.

    By default the viewport covers an entire frame.
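
    For example, a 1920x1080 video carried in frames whose width has been padded
    for alignment could be described like this (sizes are illustrative):

    \code
    QVideoFrameFormat format(QSize(1928, 1080), QVideoFrameFormat::Format_YUV420P);
    format.setViewport(QRect(0, 0, 1920, 1080)); // only this region is presented
    \endcode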
*/
QRect QVideoFrameFormat::viewport() const
{
    return d->viewport;
}

/*!
    Sets the viewport of a video stream to \a viewport.
*/
void QVideoFrameFormat::setViewport(const QRect &viewport)
{
    detach();
    d->viewport = viewport;
}

/*!
    Returns the direction of scan lines.
*/
QVideoFrameFormat::Direction QVideoFrameFormat::scanLineDirection() const
{
    return d->scanLineDirection;
}

/*!
    Sets the \a direction of scan lines.
*/
void QVideoFrameFormat::setScanLineDirection(Direction direction)
{
    detach();
    d->scanLineDirection = direction;
}

#if QT_DEPRECATED_SINCE(6, 8)
/*!
    \deprecated [6.8] Use streamFrameRate() instead.

    Returns the frame rate of a video stream in frames per second.
*/
qreal QVideoFrameFormat::frameRate() const
{
    return streamFrameRate();
}

/*!
    \deprecated [6.8] Use setStreamFrameRate() instead.

    Sets the frame \a rate of a video stream in frames per second.
*/
void QVideoFrameFormat::setFrameRate(qreal rate)
{
    setStreamFrameRate(rate);
}
#endif

/*!
    Returns the frame rate of a video stream in frames per second.
*/
qreal QVideoFrameFormat::streamFrameRate() const
{
    return d->frameRate;
}

/*!
    Sets the frame \a rate of a video stream in frames per second.
*/
void QVideoFrameFormat::setStreamFrameRate(qreal rate)
{
    detach();
    d->frameRate = rate;
}

#if QT_DEPRECATED_SINCE(6, 4)
/*!
    \deprecated Use colorSpace() instead

    Returns the Y'CbCr color space of a video stream.
*/
QVideoFrameFormat::YCbCrColorSpace QVideoFrameFormat::yCbCrColorSpace() const
{
    return YCbCrColorSpace(d->colorSpace);
}

/*!
    \deprecated Use setColorSpace() instead

    Sets the Y'CbCr color \a space of a video stream.
    It is only used with raw YUV frame types.
*/
void QVideoFrameFormat::setYCbCrColorSpace(QVideoFrameFormat::YCbCrColorSpace space)
{
    detach();
    d->colorSpace = ColorSpace(space);
}
#endif // QT_DEPRECATED_SINCE(6, 4)

/*!
    Returns the color space of a video stream.
*/
QVideoFrameFormat::ColorSpace QVideoFrameFormat::colorSpace() const
{
    return d->colorSpace;
}

/*!
    Sets the \a colorSpace of a video stream.
*/
void QVideoFrameFormat::setColorSpace(ColorSpace colorSpace)
{
    detach();
    d->colorSpace = colorSpace;
}

/*!
    Returns the color transfer function that should be used to render the
    video stream.
*/
QVideoFrameFormat::ColorTransfer QVideoFrameFormat::colorTransfer() const
{
    return d->colorTransfer;
}

/*!
    Sets the color transfer function that should be used to render the
    video stream to \a colorTransfer.
*/
void QVideoFrameFormat::setColorTransfer(ColorTransfer colorTransfer)
{
    detach();
    d->colorTransfer = colorTransfer;
}

/*!
    Returns the color range that should be used to render the
    video stream.
*/
QVideoFrameFormat::ColorRange QVideoFrameFormat::colorRange() const
{
    return d->colorRange;
}

/*!
    Sets the color range that should be used to render the
    video stream to \a range.
*/
void QVideoFrameFormat::setColorRange(ColorRange range)
{
    detach();
    d->colorRange = range;
}

/*!
    Returns \c true if the surface is mirrored around its vertical axis.

    Transformations of \c QVideoFrameFormat, specifically,
    rotation and mirroring, can be determined by the orientation of
    the camera sensor, camera settings, or the orientation of
    the video stream.

    Mirroring is applied after rotation.

    \note The mirroring here differs from QImage::mirrored, as a vertically mirrored QImage
    will be mirrored around its x-axis.

    \since 5.11
 */
bool QVideoFrameFormat::isMirrored() const
{
    return d->transformation.mirroredHorizontallyAfterRotation;
}

/*!
    Sets whether the surface is \a mirrored around its vertical axis.

    Transformations of \c QVideoFrameFormat, specifically,
    rotation and mirroring, can be determined by the orientation of
    the camera sensor, camera settings, or the orientation of
    the video stream.

    Mirroring is applied after rotation.

    Default value is \c false.

    \note The mirroring here differs from QImage::mirrored, as a vertically mirrored QImage
    will be mirrored around its x-axis.

    \since 5.11
 */
void QVideoFrameFormat::setMirrored(bool mirrored)
{
    detach();
    d->transformation.mirroredHorizontallyAfterRotation = mirrored;
}

/*!
    Returns the angle by which the surface is rotated clockwise.

    Transformations of \c QVideoFrameFormat, specifically,
    rotation and mirroring, can be determined by the orientation of
    the camera sensor, camera settings, or the orientation of
    the video stream.

    Rotation is applied before mirroring.
 */
QtVideo::Rotation QVideoFrameFormat::rotation() const
{
    return d->transformation.rotation;
}

/*!
    Sets the \a angle by which the surface is rotated clockwise.

    Transformations of \c QVideoFrameFormat, specifically,
    rotation and mirroring, can be determined by the orientation of
    the camera sensor, camera settings, or the orientation of
    the video stream.

    Rotation is applied before mirroring.

    Default value is \c QtVideo::Rotation::None.
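
    For example, a stream whose frames need to be rotated by 90 degrees and then
    mirrored before presentation could be described like this (a minimal illustrative sketch):

    \code
    QVideoFrameFormat format(QSize(1080, 1920), QVideoFrameFormat::Format_NV12);
    format.setRotation(QtVideo::Rotation::Clockwise90); // applied first
    format.setMirrored(true);                           // applied after the rotation
    \endcode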
 */
void QVideoFrameFormat::setRotation(QtVideo::Rotation angle)
{
    detach();
    d->transformation.rotation = angle;
}

/*!
    \internal
*/
QString QVideoFrameFormat::vertexShaderFileName() const
{
    return QVideoTextureHelper::vertexShaderFileName(*this);
}

/*!
    \internal
*/
QString QVideoFrameFormat::fragmentShaderFileName() const
{
    return QVideoTextureHelper::fragmentShaderFileName(*this, nullptr);
}

/*!
    \internal
*/
void QVideoFrameFormat::updateUniformData(QByteArray *dst, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity) const
{
    QVideoTextureHelper::updateUniformData(dst, nullptr, *this, frame, transform, opacity);
}

/*!
    \internal

    The maximum luminance in nits as set by the HDR metadata. If the video doesn't have
    metadata, the returned value depends on the maximum that can be encoded by the
    transfer function.
*/
float QVideoFrameFormat::maxLuminance() const
{
    if (d->maxLuminance <= 0) {
        if (d->colorTransfer == ColorTransfer_ST2084)
            return 10000.; // ST2084 can encode up to 10000 nits
        if (d->colorTransfer == ColorTransfer_STD_B67)
            return 1500.; // STD_B67 can encode up to 1200 nits, use a bit more for some headroom
        return 100; // SDR
    }
    return d->maxLuminance;
}

/*!
    Sets the maximum luminance to the given value, \a lum.
*/
void QVideoFrameFormat::setMaxLuminance(float lum)
{
    detach();
    d->maxLuminance = lum;
}


/*!
    Returns a video pixel format equivalent to an image \a format. If there is no equivalent
    format, QVideoFrameFormat::Format_Invalid is returned instead.

    \note In general \l QImage does not handle YUV formats.

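    A minimal sketch of mapping an existing QImage to a video pixel format, assuming the
    image uses one of the supported RGB or grayscale formats:

    \code
    QImage image(640, 480, QImage::Format_RGBA8888);
    const auto pixelFormat = QVideoFrameFormat::pixelFormatFromImageFormat(image.format());
    if (pixelFormat != QVideoFrameFormat::Format_Invalid) {
        QVideoFrameFormat format(image.size(), pixelFormat);
        // ... describe frames converted from the image ...
    }
    \endcode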
*/
QVideoFrameFormat::PixelFormat QVideoFrameFormat::pixelFormatFromImageFormat(QImage::Format format)
{
    switch (format) {
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    case QImage::Format_RGB32:
        return QVideoFrameFormat::Format_BGRX8888;
    case QImage::Format_ARGB32:
        return QVideoFrameFormat::Format_BGRA8888;
    case QImage::Format_ARGB32_Premultiplied:
        return QVideoFrameFormat::Format_BGRA8888_Premultiplied;
#else
    case QImage::Format_RGB32:
        return QVideoFrameFormat::Format_XRGB8888;
    case QImage::Format_ARGB32:
        return QVideoFrameFormat::Format_ARGB8888;
    case QImage::Format_ARGB32_Premultiplied:
        return QVideoFrameFormat::Format_ARGB8888_Premultiplied;
#endif
    case QImage::Format_RGBA8888:
        return QVideoFrameFormat::Format_RGBA8888;
    case QImage::Format_RGBA8888_Premultiplied:
        // QVideoFrameFormat::Format_RGBA8888_Premultiplied is to be added in 6.8;
        // Format_RGBX8888 is the closest match and is used as a workaround
        return QVideoFrameFormat::Format_RGBX8888;
    case QImage::Format_RGBX8888:
        return QVideoFrameFormat::Format_RGBX8888;
    case QImage::Format_Grayscale8:
        return QVideoFrameFormat::Format_Y8;
    case QImage::Format_Grayscale16:
        return QVideoFrameFormat::Format_Y16;
    default:
        return QVideoFrameFormat::Format_Invalid;
    }
}

/*!
    Returns an image format equivalent to a video frame pixel \a format. If there is no equivalent
    format, QImage::Format_Invalid is returned instead.

    \note In general \l QImage does not handle YUV formats.

*/
QImage::Format QVideoFrameFormat::imageFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
{
    switch (format) {
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    case QVideoFrameFormat::Format_BGRA8888:
        return QImage::Format_ARGB32;
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
        return QImage::Format_ARGB32_Premultiplied;
    case QVideoFrameFormat::Format_BGRX8888:
        return QImage::Format_RGB32;
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        return QImage::Format_Invalid;
#else
    case QVideoFrameFormat::Format_ARGB8888:
        return QImage::Format_ARGB32;
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
        return QImage::Format_ARGB32_Premultiplied;
    case QVideoFrameFormat::Format_XRGB8888:
        return QImage::Format_RGB32;
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        return QImage::Format_Invalid;
#endif
    case QVideoFrameFormat::Format_RGBA8888:
        return QImage::Format_RGBA8888;
    case QVideoFrameFormat::Format_RGBX8888:
        return QImage::Format_RGBX8888;
    case QVideoFrameFormat::Format_Y8:
        return QImage::Format_Grayscale8;
    case QVideoFrameFormat::Format_Y16:
        return QImage::Format_Grayscale16;
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV420P10:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_UYVY:
    case QVideoFrameFormat::Format_YUYV:
    case QVideoFrameFormat::Format_NV12:
    case QVideoFrameFormat::Format_NV21:
    case QVideoFrameFormat::Format_IMC1:
    case QVideoFrameFormat::Format_IMC2:
    case QVideoFrameFormat::Format_IMC3:
    case QVideoFrameFormat::Format_IMC4:
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
    case QVideoFrameFormat::Format_Jpeg:
    case QVideoFrameFormat::Format_Invalid:
    case QVideoFrameFormat::Format_SamplerExternalOES:
    case QVideoFrameFormat::Format_SamplerRect:
        return QImage::Format_Invalid;
    }
    return QImage::Format_Invalid;
}

/*!
    Returns a string representation of the given \a pixelFormat.
*/
QString QVideoFrameFormat::pixelFormatToString(QVideoFrameFormat::PixelFormat pixelFormat)
{
    switch (pixelFormat) {
    case QVideoFrameFormat::Format_Invalid:
        return QStringLiteral("Invalid");
    case QVideoFrameFormat::Format_ARGB8888:
        return QStringLiteral("ARGB8888");
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
        return QStringLiteral("ARGB8888 Premultiplied");
    case QVideoFrameFormat::Format_XRGB8888:
        return QStringLiteral("XRGB8888");
    case QVideoFrameFormat::Format_BGRA8888:
        return QStringLiteral("BGRA8888");
    case QVideoFrameFormat::Format_BGRX8888:
        return QStringLiteral("BGRX8888");
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
        return QStringLiteral("BGRA8888 Premultiplied");
    case QVideoFrameFormat::Format_RGBA8888:
        return QStringLiteral("RGBA8888");
    case QVideoFrameFormat::Format_RGBX8888:
        return QStringLiteral("RGBX8888");
    case QVideoFrameFormat::Format_ABGR8888:
        return QStringLiteral("ABGR8888");
    case QVideoFrameFormat::Format_XBGR8888:
        return QStringLiteral("XBGR8888");
    case QVideoFrameFormat::Format_AYUV:
        return QStringLiteral("AYUV");
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        return QStringLiteral("AYUV Premultiplied");
    case QVideoFrameFormat::Format_YUV420P:
        return QStringLiteral("YUV420P");
    case QVideoFrameFormat::Format_YUV420P10:
        return QStringLiteral("YUV420P10");
    case QVideoFrameFormat::Format_YUV422P:
        return QStringLiteral("YUV422P");
    case QVideoFrameFormat::Format_YV12:
        return QStringLiteral("YV12");
    case QVideoFrameFormat::Format_UYVY:
        return QStringLiteral("UYVY");
    case QVideoFrameFormat::Format_YUYV:
        return QStringLiteral("YUYV");
    case QVideoFrameFormat::Format_NV12:
        return QStringLiteral("NV12");
    case QVideoFrameFormat::Format_NV21:
        return QStringLiteral("NV21");
    case QVideoFrameFormat::Format_IMC1:
        return QStringLiteral("IMC1");
    case QVideoFrameFormat::Format_IMC2:
        return QStringLiteral("IMC2");
    case QVideoFrameFormat::Format_IMC3:
        return QStringLiteral("IMC3");
    case QVideoFrameFormat::Format_IMC4:
        return QStringLiteral("IMC4");
    case QVideoFrameFormat::Format_Y8:
        return QStringLiteral("Y8");
    case QVideoFrameFormat::Format_Y16:
        return QStringLiteral("Y16");
    case QVideoFrameFormat::Format_P010:
        return QStringLiteral("P010");
    case QVideoFrameFormat::Format_P016:
        return QStringLiteral("P016");
    case QVideoFrameFormat::Format_SamplerExternalOES:
        return QStringLiteral("SamplerExternalOES");
    case QVideoFrameFormat::Format_Jpeg:
        return QStringLiteral("Jpeg");
    case QVideoFrameFormat::Format_SamplerRect:
        return QStringLiteral("SamplerRect");
    }

    return QStringLiteral("");
}

#ifndef QT_NO_DEBUG_STREAM
# if QT_DEPRECATED_SINCE(6, 4)
QDebug operator<<(QDebug dbg, QVideoFrameFormat::YCbCrColorSpace cs)
{
    QDebugStateSaver saver(dbg);
    dbg.nospace();
    switch (cs) {
    case QVideoFrameFormat::YCbCr_BT601:
        dbg << "YCbCr_BT601";
        break;
    case QVideoFrameFormat::YCbCr_BT709:
        dbg << "YCbCr_BT709";
        break;
    case QVideoFrameFormat::YCbCr_JPEG:
        dbg << "YCbCr_JPEG";
        break;
    case QVideoFrameFormat::YCbCr_xvYCC601:
        dbg << "YCbCr_xvYCC601";
        break;
    case QVideoFrameFormat::YCbCr_xvYCC709:
        dbg << "YCbCr_xvYCC709";
        break;
    case QVideoFrameFormat::YCbCr_BT2020:
        dbg << "YCbCr_BT2020";
        break;
    default:
        dbg << "YCbCr_Undefined";
        break;
    }
    return dbg;
}
# endif // QT_DEPRECATED_SINCE(6, 4)

QDebug operator<<(QDebug dbg, QVideoFrameFormat::ColorSpace cs)
{
    QDebugStateSaver saver(dbg);
    dbg.nospace();
    switch (cs) {
    case QVideoFrameFormat::ColorSpace_BT601:
        dbg << "ColorSpace_BT601";
        break;
    case QVideoFrameFormat::ColorSpace_BT709:
        dbg << "ColorSpace_BT709";
        break;
    case QVideoFrameFormat::ColorSpace_AdobeRgb:
        dbg << "ColorSpace_AdobeRgb";
        break;
    case QVideoFrameFormat::ColorSpace_BT2020:
        dbg << "ColorSpace_BT2020";
        break;
    default:
        dbg << "ColorSpace_Undefined";
        break;
    }
    return dbg;
}

QDebug operator<<(QDebug dbg, QVideoFrameFormat::Direction dir)
{
    QDebugStateSaver saver(dbg);
    dbg.nospace();
    switch (dir) {
    case QVideoFrameFormat::BottomToTop:
        dbg << "BottomToTop";
        break;
    case QVideoFrameFormat::TopToBottom:
        dbg << "TopToBottom";
        break;
    }
    return dbg;
}

QDebug operator<<(QDebug dbg, const QVideoFrameFormat &f)
{
    QDebugStateSaver saver(dbg);
    dbg.nospace();
    dbg << "QVideoFrameFormat(" << f.pixelFormat() << ", " << f.frameSize()
        << ", viewport=" << f.viewport()
        << ", colorSpace=" << f.colorSpace()
        << ')'
        << "\n pixel format=" << f.pixelFormat()
        << "\n frame size=" << f.frameSize()
        << "\n viewport=" << f.viewport()
        << "\n colorSpace=" << f.colorSpace()
        << "\n frameRate=" << f.streamFrameRate()
        << "\n mirrored=" << f.isMirrored();

    return dbg;
}

QDebug operator<<(QDebug dbg, QVideoFrameFormat::PixelFormat pf)
{
    QDebugStateSaver saver(dbg);
    dbg.nospace();

    auto format = QVideoFrameFormat::pixelFormatToString(pf);
    if (format.isEmpty())
        return dbg;

    dbg.noquote() << QStringLiteral("Format_") << format;
    return dbg;
}
#endif

QT_END_NAMESPACE

