1 | // Copyright (C) 2016 The Qt Company Ltd. |
2 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only |
3 | |
4 | #include "qvideoframeformat.h" |
5 | #include "qvideotexturehelper_p.h" |
6 | |
7 | #include <qdebug.h> |
8 | #include <qlist.h> |
9 | #include <qmetatype.h> |
10 | #include <qpair.h> |
11 | #include <qvariant.h> |
12 | #include <qmatrix4x4.h> |
13 | |
// Register the shader resources linked into this library with the Qt
// resource system. Q_INIT_RESOURCE must be invoked from a function at
// namespace scope so it also works when Qt Multimedia is built statically.
static void initResource() {
    Q_INIT_RESOURCE(shaders);
}
17 | |
18 | QT_BEGIN_NAMESPACE |
19 | |
20 | class QVideoFrameFormatPrivate : public QSharedData |
21 | { |
22 | public: |
23 | QVideoFrameFormatPrivate() = default; |
24 | |
25 | QVideoFrameFormatPrivate( |
26 | const QSize &size, |
27 | QVideoFrameFormat::PixelFormat format) |
28 | : pixelFormat(format) |
29 | , frameSize(size) |
30 | , viewport(QPoint(0, 0), size) |
31 | { |
32 | } |
33 | |
34 | bool operator ==(const QVideoFrameFormatPrivate &other) const |
35 | { |
36 | if (pixelFormat == other.pixelFormat |
37 | && scanLineDirection == other.scanLineDirection |
38 | && frameSize == other.frameSize |
39 | && viewport == other.viewport |
40 | && frameRatesEqual(r1: frameRate, r2: other.frameRate) |
41 | && colorSpace == other.colorSpace |
42 | && mirrored == other.mirrored) |
43 | return true; |
44 | |
45 | return false; |
46 | } |
47 | |
48 | inline static bool frameRatesEqual(qreal r1, qreal r2) |
49 | { |
50 | return qAbs(t: r1 - r2) <= 0.00001 * qMin(a: qAbs(t: r1), b: qAbs(t: r2)); |
51 | } |
52 | |
53 | QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid; |
54 | QVideoFrameFormat::Direction scanLineDirection = QVideoFrameFormat::TopToBottom; |
55 | QSize frameSize; |
56 | QVideoFrameFormat::ColorSpace colorSpace = QVideoFrameFormat::ColorSpace_Undefined; |
57 | QVideoFrameFormat::ColorTransfer colorTransfer = QVideoFrameFormat::ColorTransfer_Unknown; |
58 | QVideoFrameFormat::ColorRange colorRange = QVideoFrameFormat::ColorRange_Unknown; |
59 | QRect viewport; |
60 | float frameRate = 0.0; |
61 | float maxLuminance = -1.; |
62 | bool mirrored = false; |
63 | }; |
64 | |
65 | QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFrameFormatPrivate); |
66 | |
67 | /*! |
68 | \class QVideoFrameFormat |
69 | \brief The QVideoFrameFormat class specifies the stream format of a video presentation |
70 | surface. |
71 | \inmodule QtMultimedia |
72 | |
73 | \ingroup multimedia |
74 | \ingroup multimedia_video |
75 | |
76 | A video sink presents a stream of video frames. QVideoFrameFormat describes the type of |
77 | the frames and determines how they should be presented. |
78 | |
79 | The core properties of a video stream required to set up a video sink are the pixel format |
80 | given by pixelFormat(), and the frame dimensions given by frameSize(). |
81 | |
82 | The region of a frame that is actually displayed on a video surface is given by the viewport(). |
83 | A stream may have a viewport less than the entire region of a frame to allow for videos smaller |
84 | than the nearest optimal size of a video frame. For example the width of a frame may be |
85 | extended so that the start of each scan line is eight byte aligned. |
86 | |
    Other common properties are the scanLineDirection(), frameRate() and the colorSpace().
88 | */ |
89 | |
90 | /*! |
91 | \enum QVideoFrameFormat::PixelFormat |
92 | |
93 | Enumerates video data types. |
94 | |
95 | \value Format_Invalid |
96 | The frame is invalid. |
97 | |
98 | \value Format_ARGB8888 |
99 | The frame is stored using a ARGB format with 8 bits per component. |
100 | |
101 | \value Format_ARGB8888_Premultiplied |
102 | The frame stored using a premultiplied ARGB format with 8 bits per component. |
103 | |
104 | \value Format_XRGB8888 |
105 | The frame stored using a 32 bits per pixel RGB format (0xff, R, G, B). |
106 | |
107 | \value Format_BGRA8888 |
108 | The frame is stored using a 32-bit BGRA format (0xBBGGRRAA). |
109 | |
110 | \value Format_BGRA8888_Premultiplied |
111 | The frame is stored using a premultiplied 32bit BGRA format. |
112 | |
113 | \value Format_ABGR8888 |
114 | The frame is stored using a 32-bit ABGR format (0xAABBGGRR). |
115 | |
116 | \value Format_XBGR8888 |
117 | The frame is stored using a 32-bit BGR format (0xffBBGGRR). |
118 | |
119 | \value Format_RGBA8888 |
120 | The frame is stored in memory as the bytes R, G, B, A/X, with R at the lowest address and A/X at the highest address. |
121 | |
122 | \value Format_BGRX8888 |
123 | The frame is stored in format 32-bit BGRx format, [31:0] B:G:R:x 8:8:8:8 little endian |
124 | |
125 | \value Format_RGBX8888 |
126 | The frame is stored in memory as the bytes R, G, B, A/X, with R at the lowest address and A/X at the highest address. |
127 | |
128 | \value Format_AYUV |
129 | The frame is stored using a packed 32-bit AYUV format (0xAAYYUUVV). |
130 | |
131 | \value Format_AYUV_Premultiplied |
132 | The frame is stored using a packed premultiplied 32-bit AYUV format (0xAAYYUUVV). |
133 | |
134 | \value Format_YUV420P |
135 | The frame is stored using an 8-bit per component planar YUV format with the U and V planes |
136 | horizontally and vertically sub-sampled, i.e. the height and width of the U and V planes are |
137 | half that of the Y plane. |
138 | |
139 | \value Format_YUV422P |
140 | The frame is stored using an 8-bit per component planar YUV format with the U and V planes |
141 | horizontally sub-sampled, i.e. the width of the U and V planes are |
142 | half that of the Y plane, and height of U and V planes is the same as Y. |
143 | |
144 | \value Format_YV12 |
145 | The frame is stored using an 8-bit per component planar YVU format with the V and U planes |
146 | horizontally and vertically sub-sampled, i.e. the height and width of the V and U planes are |
147 | half that of the Y plane. |
148 | |
149 | \value Format_UYVY |
150 | The frame is stored using an 8-bit per component packed YUV format with the U and V planes |
151 | horizontally sub-sampled (U-Y-V-Y), i.e. two horizontally adjacent pixels are stored as a 32-bit |
152 | macropixel which has a Y value for each pixel and common U and V values. |
153 | |
154 | \value Format_YUYV |
155 | The frame is stored using an 8-bit per component packed YUV format with the U and V planes |
156 | horizontally sub-sampled (Y-U-Y-V), i.e. two horizontally adjacent pixels are stored as a 32-bit |
157 | macropixel which has a Y value for each pixel and common U and V values. |
158 | |
159 | \value Format_NV12 |
160 | The frame is stored using an 8-bit per component semi-planar YUV format with a Y plane (Y) |
161 | followed by a horizontally and vertically sub-sampled, packed UV plane (U-V). |
162 | |
163 | \value Format_NV21 |
164 | The frame is stored using an 8-bit per component semi-planar YUV format with a Y plane (Y) |
165 | followed by a horizontally and vertically sub-sampled, packed VU plane (V-U). |
166 | |
167 | \value Format_IMC1 |
168 | The frame is stored using an 8-bit per component planar YUV format with the U and V planes |
169 | horizontally and vertically sub-sampled. This is similar to the Format_YUV420P type, except |
170 | that the bytes per line of the U and V planes are padded out to the same stride as the Y plane. |
171 | |
172 | \value Format_IMC2 |
173 | The frame is stored using an 8-bit per component planar YUV format with the U and V planes |
174 | horizontally and vertically sub-sampled. This is similar to the Format_YUV420P type, except |
175 | that the lines of the U and V planes are interleaved, i.e. each line of U data is followed by a |
176 | line of V data creating a single line of the same stride as the Y data. |
177 | |
178 | \value Format_IMC3 |
179 | The frame is stored using an 8-bit per component planar YVU format with the V and U planes |
180 | horizontally and vertically sub-sampled. This is similar to the Format_YV12 type, except that |
181 | the bytes per line of the V and U planes are padded out to the same stride as the Y plane. |
182 | |
183 | \value Format_IMC4 |
184 | The frame is stored using an 8-bit per component planar YVU format with the V and U planes |
185 | horizontally and vertically sub-sampled. This is similar to the Format_YV12 type, except that |
186 | the lines of the V and U planes are interleaved, i.e. each line of V data is followed by a line |
187 | of U data creating a single line of the same stride as the Y data. |
188 | |
189 | \value Format_P010 |
190 | The frame is stored using a 16bit per component semi-planar YUV format with a Y plane (Y) |
191 | followed by a horizontally and vertically sub-sampled, packed UV plane (U-V). Only the 10 most |
192 | significant bits of each component are being used. |
193 | |
194 | \value Format_P016 |
195 | The frame is stored using a 16bit per component semi-planar YUV format with a Y plane (Y) |
196 | followed by a horizontally and vertically sub-sampled, packed UV plane (U-V). |
197 | |
198 | \value Format_Y8 |
199 | The frame is stored using an 8-bit greyscale format. |
200 | |
201 | \value Format_Y16 |
202 | The frame is stored using a 16-bit linear greyscale format. Little endian. |
203 | |
204 | \value Format_Jpeg |
205 | The frame is stored in compressed Jpeg format. |
206 | |
207 | \value Format_SamplerExternalOES |
208 | The frame is stored in external OES texture format. This is currently only being used on Android. |
209 | |
210 | \value Format_SamplerRect |
211 | The frame is stored in rectangle texture format (GL_TEXTURE_RECTANGLE). This is only being used on |
212 | macOS with an OpenGL based Rendering Hardware interface. The underlying pixel format stored in the |
    texture is Format_BGRA8888.
214 | |
215 | \value Format_YUV420P10 |
216 | Similar to YUV420, but uses 16bits per component, 10 of those significant. |
217 | */ |
218 | |
219 | /*! |
220 | \enum QVideoFrameFormat::Direction |
221 | |
222 | Enumerates the layout direction of video scan lines. |
223 | |
224 | \value TopToBottom Scan lines are arranged from the top of the frame to the bottom. |
225 | \value BottomToTop Scan lines are arranged from the bottom of the frame to the top. |
226 | */ |
227 | |
228 | /*! |
229 | \enum QVideoFrameFormat::YCbCrColorSpace |
230 | |
231 | \deprecated Use QVideoFrameFormat::ColorSpace instead. |
232 | |
233 | Enumerates the Y'CbCr color space of video frames. |
234 | |
235 | \value YCbCr_Undefined |
236 | No color space is specified. |
237 | |
238 | \value YCbCr_BT601 |
239 | A Y'CbCr color space defined by ITU-R recommendation BT.601 |
240 | with Y value range from 16 to 235, and Cb/Cr range from 16 to 240. |
241 | Used mostly by older videos that were targeting CRT displays. |
242 | |
243 | \value YCbCr_BT709 |
244 | A Y'CbCr color space defined by ITU-R BT.709 with the same values range as YCbCr_BT601. |
245 | The most commonly used color space today. |
246 | |
247 | \value YCbCr_xvYCC601 |
248 | This value is deprecated. Please check the \l ColorRange instead. |
249 | The BT.601 color space with the value range extended to 0 to 255. |
250 | It is backward compatible with BT.601 and uses values outside BT.601 range to represent a |
251 | wider range of colors. |
252 | |
253 | \value YCbCr_xvYCC709 |
254 | This value is deprecated. Please check the \l ColorRange instead. |
255 | The BT.709 color space with the value range extended to 0 to 255. |
256 | |
257 | \value YCbCr_JPEG |
258 | The full range Y'CbCr color space used in most JPEG files. |
259 | |
260 | \value YCbCr_BT2020 |
261 | The color space defined by ITU-R BT.2020. Used mainly for HDR videos. |
262 | */ |
263 | |
264 | |
265 | /*! |
266 | \enum QVideoFrameFormat::ColorSpace |
267 | |
268 | Enumerates the color space of video frames. |
269 | |
270 | \value ColorSpace_Undefined |
271 | No color space is specified. |
272 | |
273 | \value ColorSpace_BT601 |
274 | A color space defined by ITU-R recommendation BT.601 |
275 | with Y value range from 16 to 235, and Cb/Cr range from 16 to 240. |
276 | Used mostly by older videos that were targeting CRT displays. |
277 | |
278 | \value ColorSpace_BT709 |
279 | A color space defined by ITU-R BT.709 with the same values range as ColorSpace_BT601. |
280 | The most commonly used color space today. |
281 | |
282 | \value ColorSpace_AdobeRgb |
283 | The full range YUV color space used in most JPEG files. |
284 | |
285 | \value ColorSpace_BT2020 |
286 | The color space defined by ITU-R BT.2020. Used mainly for HDR videos. |
287 | */ |
288 | |
289 | /*! |
290 | \enum QVideoFrameFormat::ColorTransfer |
291 | |
292 | \value ColorTransfer_Unknown |
293 | The color transfer function is unknown. |
294 | |
295 | \value ColorTransfer_BT709 |
296 | Color values are encoded according to BT709. See also https://www.itu.int/rec/R-REC-BT.709/en. |
297 | This is close to, but not identical to a gamma curve of 2.2, and the same transfer curve as is |
298 | used in sRGB. |
299 | |
300 | \value ColorTransfer_BT601 |
301 | Color values are encoded according to BT601. See also https://www.itu.int/rec/R-REC-BT.601/en. |
302 | |
303 | \value ColorTransfer_Linear |
304 | Color values are linear |
305 | |
306 | \value ColorTransfer_Gamma22 |
307 | Color values are encoded with a gamma of 2.2 |
308 | |
309 | \value ColorTransfer_Gamma28 |
310 | Color values are encoded with a gamma of 2.8 |
311 | |
312 | \value ColorTransfer_ST2084 |
    Color values are encoded using SMPTE ST 2084. This transfer function is the most common HDR
314 | transfer function and often called the 'perceptual quantizer'. See also https://www.itu.int/rec/R-REC-BT.2100 |
315 | and https://en.wikipedia.org/wiki/Perceptual_quantizer. |
316 | |
317 | |
318 | \value ColorTransfer_STD_B67 |
319 | Color values are encoded using ARIB STD B67. This transfer function is also often referred to as 'hybrid log gamma'. |
320 | See also https://www.itu.int/rec/R-REC-BT.2100 and https://en.wikipedia.org/wiki/Hybrid_log–gamma. |
321 | */ |
322 | |
323 | /*! |
324 | \enum QVideoFrameFormat::ColorRange |
325 | |
326 | Describes the color range used by the video data. Video data usually comes in either full |
327 | color range, where all values are being used, or a more limited range traditionally used in |
328 | YUV video formats, where a subset of all values is being used. |
329 | |
330 | \value ColorRange_Unknown |
331 | The color range of the video is unknown. |
332 | |
333 | \value ColorRange_Video |
334 | |
335 | The color range traditionally used by most YUV video formats. For 8 bit formats, the Y component is |
336 | limited to values between 16 and 235. The U and V components are limited to values between 16 and 240 |
337 | |
338 | For higher bit depths multiply these values with 2^(depth-8). |
339 | |
340 | \value ColorRange_Full |
341 | |
342 | Full color range. All values from 0 to 2^depth - 1 are valid. |
343 | */ |
344 | |
345 | /*! |
346 | Constructs a null video stream format. |
347 | */ |
QVideoFrameFormat::QVideoFrameFormat()
    : d(new QVideoFrameFormatPrivate)
{
    // Make sure the built-in shader resources are registered before any
    // format is used for rendering.
    initResource();
}
353 | |
354 | /*! |
355 | Constructs a video stream with the given frame \a size and pixel \a format. |
356 | */ |
QVideoFrameFormat::QVideoFrameFormat(
    const QSize& size, QVideoFrameFormat::PixelFormat format)
    : d(new QVideoFrameFormatPrivate(size, format))
{
    // The private constructor initializes the viewport to cover the
    // entire frame.
}
362 | |
363 | /*! |
364 | Constructs a copy of \a other. |
365 | */ |
QVideoFrameFormat::QVideoFrameFormat(const QVideoFrameFormat &other) = default; // shallow copy: shares the implicitly shared d pointer
367 | |
368 | /*! |
369 | \fn QVideoFrameFormat::QVideoFrameFormat(QVideoFrameFormat &&other) |
370 | |
371 | Constructs a QVideoFrameFormat by moving from \a other. |
372 | */ |
373 | |
374 | /*! |
375 | \fn void QVideoFrameFormat::swap(QVideoFrameFormat &other) noexcept |
376 | |
377 | Swaps the current video frame format with the \a other. |
378 | */ |
379 | |
380 | /*! |
381 | Assigns the values of \a other to this object. |
382 | */ |
QVideoFrameFormat &QVideoFrameFormat::operator =(const QVideoFrameFormat &other) = default; // shallow assignment via the shared d pointer
384 | |
385 | /*! |
386 | \fn QVideoFrameFormat &QVideoFrameFormat::operator =(QVideoFrameFormat &&other) |
387 | |
388 | Moves \a other into this QVideoFrameFormat. |
389 | */ |
390 | |
391 | /*! |
392 | Destroys a video stream description. |
393 | */ |
QVideoFrameFormat::~QVideoFrameFormat() = default; // d is released when the last reference goes away
395 | |
396 | /*! |
397 | Identifies if a video surface format has a valid pixel format and frame size. |
398 | |
399 | Returns true if the format is valid, and false otherwise. |
400 | */ |
401 | bool QVideoFrameFormat::isValid() const |
402 | { |
403 | return d->pixelFormat != Format_Invalid && d->frameSize.isValid(); |
404 | } |
405 | |
406 | /*! |
407 | Returns true if \a other is the same as this video format, and false if they are different. |
408 | */ |
409 | bool QVideoFrameFormat::operator ==(const QVideoFrameFormat &other) const |
410 | { |
411 | return d == other.d || *d == *other.d; |
412 | } |
413 | |
414 | /*! |
415 | Returns true if \a other is different to this video format, and false if they are the same. |
416 | */ |
417 | bool QVideoFrameFormat::operator !=(const QVideoFrameFormat &other) const |
418 | { |
419 | return d != other.d && !(*d == *other.d); |
420 | } |
421 | |
422 | /*! |
423 | \internal |
424 | */ |
void QVideoFrameFormat::detach()
{
    // Copy-on-write: deep-copies the shared private data if it is
    // referenced by more than one instance.
    d.detach();
}
429 | |
430 | /*! |
431 | Returns the pixel format of frames in a video stream. |
432 | */ |
QVideoFrameFormat::PixelFormat QVideoFrameFormat::pixelFormat() const
{
    return d->pixelFormat; // Format_Invalid for a default-constructed format
}
437 | |
438 | /*! |
439 | Returns the dimensions of frames in a video stream. |
440 | |
441 | \sa frameWidth(), frameHeight() |
442 | */ |
QSize QVideoFrameFormat::frameSize() const
{
    return d->frameSize; // invalid QSize for a default-constructed format
}
447 | |
448 | /*! |
449 | Returns the width of frames in a video stream. |
450 | |
451 | \sa frameSize(), frameHeight() |
452 | */ |
int QVideoFrameFormat::frameWidth() const
{
    // Convenience accessor for frameSize().width().
    return d->frameSize.width();
}
457 | |
458 | /*! |
459 | Returns the height of frame in a video stream. |
460 | */ |
int QVideoFrameFormat::frameHeight() const
{
    // Convenience accessor for frameSize().height().
    return d->frameSize.height();
}
465 | |
466 | /*! |
467 | Returns the number of planes used. |
468 | This number is depending on the pixel format and is |
469 | 1 for RGB based formats, and a number between 1 and 3 for |
470 | YUV based formats. |
471 | */ |
472 | int QVideoFrameFormat::planeCount() const |
473 | { |
474 | return QVideoTextureHelper::textureDescription(format: d->pixelFormat)->nplanes; |
475 | } |
476 | |
477 | /*! |
478 | Sets the size of frames in a video stream to \a size. |
479 | |
480 | This will reset the viewport() to fill the entire frame. |
481 | */ |
482 | void QVideoFrameFormat::setFrameSize(const QSize &size) |
483 | { |
484 | detach(); |
485 | d->frameSize = size; |
486 | d->viewport = QRect(QPoint(0, 0), size); |
487 | } |
488 | |
489 | /*! |
490 | \overload |
491 | |
492 | Sets the \a width and \a height of frames in a video stream. |
493 | |
494 | This will reset the viewport() to fill the entire frame. |
495 | */ |
496 | void QVideoFrameFormat::setFrameSize(int width, int height) |
497 | { |
498 | detach(); |
499 | d->frameSize = QSize(width, height); |
500 | d->viewport = QRect(0, 0, width, height); |
501 | } |
502 | |
503 | /*! |
504 | Returns the viewport of a video stream. |
505 | |
506 | The viewport is the region of a video frame that is actually displayed. |
507 | |
508 | By default the viewport covers an entire frame. |
509 | */ |
QRect QVideoFrameFormat::viewport() const
{
    return d->viewport; // defaults to the whole frame
}
514 | |
515 | /*! |
516 | Sets the viewport of a video stream to \a viewport. |
517 | */ |
void QVideoFrameFormat::setViewport(const QRect &viewport)
{
    // Note: the viewport is not clipped against the frame size here.
    detach();
    d->viewport = viewport;
}
523 | |
524 | /*! |
525 | Returns the direction of scan lines. |
526 | */ |
QVideoFrameFormat::Direction QVideoFrameFormat::scanLineDirection() const
{
    return d->scanLineDirection; // TopToBottom by default
}
531 | |
532 | /*! |
533 | Sets the \a direction of scan lines. |
534 | */ |
void QVideoFrameFormat::setScanLineDirection(Direction direction)
{
    detach();
    d->scanLineDirection = direction;
}
540 | |
541 | /*! |
542 | Returns the frame rate of a video stream in frames per second. |
543 | */ |
qreal QVideoFrameFormat::frameRate() const
{
    // Stored as float internally; widened to qreal for the public API.
    return d->frameRate;
}
548 | |
549 | /*! |
550 | Sets the frame \a rate of a video stream in frames per second. |
551 | */ |
void QVideoFrameFormat::setFrameRate(qreal rate)
{
    detach();
    // NOTE(review): the qreal argument is narrowed to the float member here.
    d->frameRate = rate;
}
557 | |
558 | #if QT_DEPRECATED_SINCE(6, 4) |
559 | /*! |
560 | \deprecated Use colorSpace() instead |
561 | |
562 | Returns the Y'CbCr color space of a video stream. |
563 | */ |
QVideoFrameFormat::YCbCrColorSpace QVideoFrameFormat::yCbCrColorSpace() const
{
    // The deprecated enum is converted by a plain cast -- assumes its
    // enumerator values mirror ColorSpace one-to-one; TODO confirm.
    return YCbCrColorSpace(d->colorSpace);
}
568 | |
569 | /*! |
570 | \deprecated Use setColorSpace() instead |
571 | |
572 | Sets the Y'CbCr color \a space of a video stream. |
573 | It is only used with raw YUV frame types. |
574 | */ |
void QVideoFrameFormat::setYCbCrColorSpace(QVideoFrameFormat::YCbCrColorSpace space)
{
    detach();
    // Cast relies on the deprecated enum mirroring ColorSpace value-for-value.
    d->colorSpace = ColorSpace(space);
}
580 | #endif // QT_DEPRECATED_SINCE(6, 4) |
581 | |
582 | /*! |
583 | Returns the color space of a video stream. |
584 | */ |
QVideoFrameFormat::ColorSpace QVideoFrameFormat::colorSpace() const
{
    return d->colorSpace; // ColorSpace_Undefined by default
}
589 | |
590 | /*! |
591 | Sets the \a colorSpace of a video stream. |
592 | */ |
void QVideoFrameFormat::setColorSpace(ColorSpace colorSpace)
{
    detach();
    d->colorSpace = colorSpace;
}
598 | |
599 | /*! |
600 | Returns the color transfer function that should be used to render the |
601 | video stream. |
602 | */ |
QVideoFrameFormat::ColorTransfer QVideoFrameFormat::colorTransfer() const
{
    return d->colorTransfer; // ColorTransfer_Unknown by default
}
607 | |
608 | /*! |
609 | Sets the color transfer function that should be used to render the |
610 | video stream to \a colorTransfer. |
611 | */ |
void QVideoFrameFormat::setColorTransfer(ColorTransfer colorTransfer)
{
    detach();
    d->colorTransfer = colorTransfer;
}
617 | |
618 | /*! |
619 | Returns the color range that should be used to render the |
620 | video stream. |
621 | */ |
QVideoFrameFormat::ColorRange QVideoFrameFormat::colorRange() const
{
    return d->colorRange; // ColorRange_Unknown by default
}
626 | |
627 | /*! |
628 | Sets the color transfer range that should be used to render the |
629 | video stream to \a range. |
630 | */ |
void QVideoFrameFormat::setColorRange(ColorRange range)
{
    detach();
    d->colorRange = range;
}
636 | |
637 | /*! |
638 | Returns \c true if the surface is mirrored around its vertical axis. |
639 | This is typically needed for video frames coming from a front camera of a mobile device. |
640 | |
641 | \note The mirroring here differs from QImage::mirrored, as a vertically mirrored QImage |
642 | will be mirrored around its x-axis. |
643 | |
644 | \since 5.11 |
645 | */ |
bool QVideoFrameFormat::isMirrored() const
{
    return d->mirrored; // false by default
}
650 | |
651 | /*! |
652 | Sets if the surface is \a mirrored around its vertical axis. |
653 | This is typically needed for video frames coming from a front camera of a mobile device. |
654 | Default value is false. |
655 | |
656 | \note The mirroring here differs from QImage::mirrored, as a vertically mirrored QImage |
657 | will be mirrored around its x-axis. |
658 | |
659 | \since 5.11 |
660 | */ |
void QVideoFrameFormat::setMirrored(bool mirrored)
{
    detach();
    d->mirrored = mirrored;
}
666 | |
667 | /*! |
668 | \internal |
669 | */ |
670 | QString QVideoFrameFormat::vertexShaderFileName() const |
671 | { |
672 | return QVideoTextureHelper::vertexShaderFileName(format: *this); |
673 | } |
674 | |
675 | /*! |
676 | \internal |
677 | */ |
678 | QString QVideoFrameFormat::fragmentShaderFileName() const |
679 | { |
680 | return QVideoTextureHelper::fragmentShaderFileName(format: *this); |
681 | } |
682 | |
683 | /*! |
684 | \internal |
685 | */ |
686 | void QVideoFrameFormat::updateUniformData(QByteArray *dst, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity) const |
687 | { |
688 | QVideoTextureHelper::updateUniformData(dst, format: *this, frame, transform, opacity); |
689 | } |
690 | |
691 | /*! |
692 | \internal |
693 | |
    The maximum luminance in nits as set by the HDR metadata. If the video doesn't have meta data, the returned value depends on the
695 | maximum that can be encoded by the transfer function. |
696 | */ |
697 | float QVideoFrameFormat::maxLuminance() const |
698 | { |
699 | if (d->maxLuminance <= 0) { |
700 | if (d->colorTransfer == ColorTransfer_ST2084) |
701 | return 10000.; // ST2084 can encode up to 10000 nits |
702 | if (d->colorTransfer == ColorTransfer_STD_B67) |
703 | return 1500.; // SRD_B67 can encode up to 1200 nits, use a bit more for some headroom |
704 | return 100; // SDR |
705 | } |
706 | return d->maxLuminance; |
707 | } |
708 | /*! |
709 | Sets the maximum luminance to the given value, \a lum. |
710 | */ |
void QVideoFrameFormat::setMaxLuminance(float lum)
{
    detach();
    // Values <= 0 mean "not set"; maxLuminance() then derives a default
    // from the color transfer function.
    d->maxLuminance = lum;
}
716 | |
717 | |
718 | /*! |
719 | Returns a video pixel format equivalent to an image \a format. If there is no equivalent |
720 | format QVideoFrameFormat::Format_Invalid is returned instead. |
721 | |
722 | \note In general \l QImage does not handle YUV formats. |
723 | |
724 | */ |
QVideoFrameFormat::PixelFormat QVideoFrameFormat::pixelFormatFromImageFormat(QImage::Format format)
{
    switch (format) {
    // QImage's 32-bit formats are byte-order dependent, so the matching
    // byte-ordered video format differs between little and big endian.
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    case QImage::Format_RGB32:
        return QVideoFrameFormat::Format_BGRX8888;
    case QImage::Format_ARGB32:
        return QVideoFrameFormat::Format_BGRA8888;
    case QImage::Format_ARGB32_Premultiplied:
        return QVideoFrameFormat::Format_BGRA8888_Premultiplied;
#else
    case QImage::Format_RGB32:
        return QVideoFrameFormat::Format_XRGB8888;
    case QImage::Format_ARGB32:
        return QVideoFrameFormat::Format_ARGB8888;
    case QImage::Format_ARGB32_Premultiplied:
        return QVideoFrameFormat::Format_ARGB8888_Premultiplied;
#endif
    case QImage::Format_RGBA8888:
        return QVideoFrameFormat::Format_RGBA8888;
    // NOTE(review): maps to an ARGB video format while the source is RGBA --
    // looks asymmetric with the other mappings; confirm this is intended.
    case QImage::Format_RGBA8888_Premultiplied:
        return QVideoFrameFormat::Format_ARGB8888_Premultiplied;
    case QImage::Format_RGBX8888:
        return QVideoFrameFormat::Format_RGBX8888;
    case QImage::Format_Grayscale8:
        return QVideoFrameFormat::Format_Y8;
    case QImage::Format_Grayscale16:
        return QVideoFrameFormat::Format_Y16;
    default:
        // All other QImage formats (indexed, 16-bit, float, ...) have no
        // video equivalent.
        return QVideoFrameFormat::Format_Invalid;
    }
}
757 | |
758 | /*! |
759 | Returns an image format equivalent to a video frame pixel \a format. If there is no equivalent |
760 | format QImage::Format_Invalid is returned instead. |
761 | |
762 | \note In general \l QImage does not handle YUV formats. |
763 | |
764 | */ |
QImage::Format QVideoFrameFormat::imageFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
{
    switch (format) {
    // The 32-bit mappings are the inverse of pixelFormatFromImageFormat()
    // and therefore depend on host byte order.
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    case QVideoFrameFormat::Format_BGRA8888:
        return QImage::Format_ARGB32;
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
        return QImage::Format_ARGB32_Premultiplied;
    case QVideoFrameFormat::Format_BGRX8888:
        return QImage::Format_RGB32;
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        return QImage::Format_Invalid;
#else
    case QVideoFrameFormat::Format_ARGB8888:
        return QImage::Format_ARGB32;
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
        return QImage::Format_ARGB32_Premultiplied;
    case QVideoFrameFormat::Format_XRGB8888:
        return QImage::Format_RGB32;
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        return QImage::Format_Invalid;
#endif
    case QVideoFrameFormat::Format_RGBA8888:
        return QImage::Format_RGBA8888;
    case QVideoFrameFormat::Format_RGBX8888:
        return QImage::Format_RGBX8888;
    case QVideoFrameFormat::Format_Y8:
        return QImage::Format_Grayscale8;
    case QVideoFrameFormat::Format_Y16:
        return QImage::Format_Grayscale16;
    // YUV, compressed and sampler-based formats cannot be represented by
    // QImage at all.
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV420P10:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_UYVY:
    case QVideoFrameFormat::Format_YUYV:
    case QVideoFrameFormat::Format_NV12:
    case QVideoFrameFormat::Format_NV21:
    case QVideoFrameFormat::Format_IMC1:
    case QVideoFrameFormat::Format_IMC2:
    case QVideoFrameFormat::Format_IMC3:
    case QVideoFrameFormat::Format_IMC4:
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
    case QVideoFrameFormat::Format_Jpeg:
    case QVideoFrameFormat::Format_Invalid:
    case QVideoFrameFormat::Format_SamplerExternalOES:
    case QVideoFrameFormat::Format_SamplerRect:
        return QImage::Format_Invalid;
    }
    // Unreachable for valid enum values; keeps compilers quiet about
    // falling off the end.
    return QImage::Format_Invalid;
}
825 | |
826 | /*! |
827 | Returns a string representation of the given \a pixelFormat. |
828 | */ |
QString QVideoFrameFormat::pixelFormatToString(QVideoFrameFormat::PixelFormat pixelFormat)
{
    // Exhaustive, non-localized mapping used for debug output and logging.
    switch (pixelFormat) {
    case QVideoFrameFormat::Format_Invalid:
        return QStringLiteral("Invalid");
    case QVideoFrameFormat::Format_ARGB8888:
        return QStringLiteral("ARGB8888");
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
        return QStringLiteral("ARGB8888 Premultiplied");
    case QVideoFrameFormat::Format_XRGB8888:
        return QStringLiteral("XRGB8888");
    case QVideoFrameFormat::Format_BGRA8888:
        return QStringLiteral("BGRA8888");
    case QVideoFrameFormat::Format_BGRX8888:
        return QStringLiteral("BGRX8888");
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
        return QStringLiteral("BGRA8888 Premultiplied");
    case QVideoFrameFormat::Format_RGBA8888:
        return QStringLiteral("RGBA8888");
    case QVideoFrameFormat::Format_RGBX8888:
        return QStringLiteral("RGBX8888");
    case QVideoFrameFormat::Format_ABGR8888:
        return QStringLiteral("ABGR8888");
    case QVideoFrameFormat::Format_XBGR8888:
        return QStringLiteral("XBGR8888");
    case QVideoFrameFormat::Format_AYUV:
        return QStringLiteral("AYUV");
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        return QStringLiteral("AYUV Premultiplied");
    case QVideoFrameFormat::Format_YUV420P:
        return QStringLiteral("YUV420P");
    case QVideoFrameFormat::Format_YUV420P10:
        return QStringLiteral("YUV420P10");
    case QVideoFrameFormat::Format_YUV422P:
        return QStringLiteral("YUV422P");
    case QVideoFrameFormat::Format_YV12:
        return QStringLiteral("YV12");
    case QVideoFrameFormat::Format_UYVY:
        return QStringLiteral("UYVY");
    case QVideoFrameFormat::Format_YUYV:
        return QStringLiteral("YUYV");
    case QVideoFrameFormat::Format_NV12:
        return QStringLiteral("NV12");
    case QVideoFrameFormat::Format_NV21:
        return QStringLiteral("NV21");
    case QVideoFrameFormat::Format_IMC1:
        return QStringLiteral("IMC1");
    case QVideoFrameFormat::Format_IMC2:
        return QStringLiteral("IMC2");
    case QVideoFrameFormat::Format_IMC3:
        return QStringLiteral("IMC3");
    case QVideoFrameFormat::Format_IMC4:
        return QStringLiteral("IMC4");
    case QVideoFrameFormat::Format_Y8:
        return QStringLiteral("Y8");
    case QVideoFrameFormat::Format_Y16:
        return QStringLiteral("Y16");
    case QVideoFrameFormat::Format_P010:
        return QStringLiteral("P010");
    case QVideoFrameFormat::Format_P016:
        return QStringLiteral("P016");
    case QVideoFrameFormat::Format_SamplerExternalOES:
        return QStringLiteral("SamplerExternalOES");
    case QVideoFrameFormat::Format_Jpeg:
        return QStringLiteral("Jpeg");
    case QVideoFrameFormat::Format_SamplerRect:
        return QStringLiteral("SamplerRect");
    }

    // Unreachable for valid enum values.
    return QStringLiteral("");
}
900 | |
901 | #ifndef QT_NO_DEBUG_STREAM |
902 | # if QT_DEPRECATED_SINCE(6, 4) |
903 | QDebug operator<<(QDebug dbg, QVideoFrameFormat::YCbCrColorSpace cs) |
904 | { |
905 | QDebugStateSaver saver(dbg); |
906 | dbg.nospace(); |
907 | switch (cs) { |
908 | case QVideoFrameFormat::YCbCr_BT601: |
909 | dbg << "YCbCr_BT601" ; |
910 | break; |
911 | case QVideoFrameFormat::YCbCr_BT709: |
912 | dbg << "YCbCr_BT709" ; |
913 | break; |
914 | case QVideoFrameFormat::YCbCr_JPEG: |
915 | dbg << "YCbCr_JPEG" ; |
916 | break; |
917 | case QVideoFrameFormat::YCbCr_xvYCC601: |
918 | dbg << "YCbCr_xvYCC601" ; |
919 | break; |
920 | case QVideoFrameFormat::YCbCr_xvYCC709: |
921 | dbg << "YCbCr_xvYCC709" ; |
922 | break; |
923 | case QVideoFrameFormat::YCbCr_BT2020: |
924 | dbg << "YCbCr_BT2020" ; |
925 | break; |
926 | default: |
927 | dbg << "YCbCr_Undefined" ; |
928 | break; |
929 | } |
930 | return dbg; |
931 | } |
932 | # endif // QT_DEPRECATED_SINCE(6, 4) |
933 | |
934 | QDebug operator<<(QDebug dbg, QVideoFrameFormat::ColorSpace cs) |
935 | { |
936 | QDebugStateSaver saver(dbg); |
937 | dbg.nospace(); |
938 | switch (cs) { |
939 | case QVideoFrameFormat::ColorSpace_BT601: |
940 | dbg << "ColorSpace_BT601" ; |
941 | break; |
942 | case QVideoFrameFormat::ColorSpace_BT709: |
943 | dbg << "ColorSpace_BT709" ; |
944 | break; |
945 | case QVideoFrameFormat::ColorSpace_AdobeRgb: |
946 | dbg << "ColorSpace_AdobeRgb" ; |
947 | break; |
948 | case QVideoFrameFormat::ColorSpace_BT2020: |
949 | dbg << "ColorSpace_BT2020" ; |
950 | break; |
951 | default: |
952 | dbg << "ColorSpace_Undefined" ; |
953 | break; |
954 | } |
955 | return dbg; |
956 | } |
957 | |
958 | QDebug operator<<(QDebug dbg, QVideoFrameFormat::Direction dir) |
959 | { |
960 | QDebugStateSaver saver(dbg); |
961 | dbg.nospace(); |
962 | switch (dir) { |
963 | case QVideoFrameFormat::BottomToTop: |
964 | dbg << "BottomToTop" ; |
965 | break; |
966 | case QVideoFrameFormat::TopToBottom: |
967 | dbg << "TopToBottom" ; |
968 | break; |
969 | } |
970 | return dbg; |
971 | } |
972 | |
QDebug operator<<(QDebug dbg, const QVideoFrameFormat &f)
{
    // Saver restores the stream's space/quote settings when it goes out of scope.
    QDebugStateSaver saver(dbg);
    dbg.nospace();
    // Emits a compact one-line summary first, then an expanded per-field
    // breakdown on the following lines. Pixel format, frame size, viewport and
    // color space intentionally appear in both forms.
    dbg << "QVideoFrameFormat(" << f.pixelFormat() << ", " << f.frameSize()
        << ", viewport=" << f.viewport()
        << ", colorSpace=" << f.colorSpace()
        << ')'
        << "\n pixel format=" << f.pixelFormat()
        << "\n frame size=" << f.frameSize()
        << "\n viewport=" << f.viewport()
        << "\n colorSpace=" << f.colorSpace()
        << "\n frameRate=" << f.frameRate()
        << "\n mirrored=" << f.isMirrored();

    return dbg;
}
990 | |
991 | QDebug operator<<(QDebug dbg, QVideoFrameFormat::PixelFormat pf) |
992 | { |
993 | QDebugStateSaver saver(dbg); |
994 | dbg.nospace(); |
995 | |
996 | auto format = QVideoFrameFormat::pixelFormatToString(pixelFormat: pf); |
997 | if (format.isEmpty()) |
998 | return dbg; |
999 | |
1000 | dbg.noquote() << QStringLiteral("Format_" ) << format; |
1001 | return dbg; |
1002 | } |
1003 | #endif |
1004 | |
1005 | QT_END_NAMESPACE |
1006 | |