1 | /**************************************************************************** |
2 | ** |
3 | ** Copyright (C) 2016 The Qt Company Ltd. |
4 | ** Contact: https://www.qt.io/licensing/ |
5 | ** |
6 | ** This file is part of the Qt Toolkit. |
7 | ** |
8 | ** $QT_BEGIN_LICENSE:LGPL$ |
9 | ** Commercial License Usage |
10 | ** Licensees holding valid commercial Qt licenses may use this file in |
11 | ** accordance with the commercial license agreement provided with the |
12 | ** Software or, alternatively, in accordance with the terms contained in |
13 | ** a written agreement between you and The Qt Company. For licensing terms |
14 | ** and conditions see https://www.qt.io/terms-conditions. For further |
15 | ** information use the contact form at https://www.qt.io/contact-us. |
16 | ** |
17 | ** GNU Lesser General Public License Usage |
18 | ** Alternatively, this file may be used under the terms of the GNU Lesser |
19 | ** General Public License version 3 as published by the Free Software |
20 | ** Foundation and appearing in the file LICENSE.LGPL3 included in the |
21 | ** packaging of this file. Please review the following information to |
22 | ** ensure the GNU Lesser General Public License version 3 requirements |
23 | ** will be met: https://www.gnu.org/licenses/lgpl-3.0.html. |
24 | ** |
25 | ** GNU General Public License Usage |
26 | ** Alternatively, this file may be used under the terms of the GNU |
27 | ** General Public License version 2.0 or (at your option) the GNU General |
28 | ** Public license version 3 or any later version approved by the KDE Free |
29 | ** Qt Foundation. The licenses are as published by the Free Software |
30 | ** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3 |
31 | ** included in the packaging of this file. Please review the following |
32 | ** information to ensure the GNU General Public License requirements will |
33 | ** be met: https://www.gnu.org/licenses/gpl-2.0.html and |
34 | ** https://www.gnu.org/licenses/gpl-3.0.html. |
35 | ** |
36 | ** $QT_END_LICENSE$ |
37 | ** |
38 | ****************************************************************************/ |
39 | |
40 | #include "qvideoframe.h" |
41 | |
42 | #include "qimagevideobuffer_p.h" |
43 | #include "qmemoryvideobuffer_p.h" |
44 | #include "qvideoframeconversionhelper_p.h" |
45 | |
46 | #include <qimage.h> |
47 | #include <qpair.h> |
48 | #include <qsize.h> |
49 | #include <qvariant.h> |
50 | #include <qvector.h> |
51 | #include <qmutex.h> |
52 | |
53 | #include <QDebug> |
54 | |
55 | QT_BEGIN_NAMESPACE |
56 | |
57 | static void qRegisterVideoFrameMetaTypes() |
58 | { |
59 | qRegisterMetaType<QVideoFrame>(); |
60 | qRegisterMetaType<QVideoFrame::FieldType>(); |
61 | qRegisterMetaType<QVideoFrame::PixelFormat>(); |
62 | } |
63 | |
64 | Q_CONSTRUCTOR_FUNCTION(qRegisterVideoFrameMetaTypes) |
65 | |
66 | |
67 | class QVideoFramePrivate : public QSharedData |
68 | { |
69 | public: |
70 | QVideoFramePrivate() |
71 | : startTime(-1) |
72 | , endTime(-1) |
73 | , mappedBytes(0) |
74 | , planeCount(0) |
75 | , pixelFormat(QVideoFrame::Format_Invalid) |
76 | , fieldType(QVideoFrame::ProgressiveFrame) |
77 | , buffer(nullptr) |
78 | , mappedCount(0) |
79 | { |
80 | memset(data, 0, sizeof(data)); |
81 | memset(bytesPerLine, 0, sizeof(bytesPerLine)); |
82 | } |
83 | |
84 | QVideoFramePrivate(const QSize &size, QVideoFrame::PixelFormat format) |
85 | : size(size) |
86 | , startTime(-1) |
87 | , endTime(-1) |
88 | , mappedBytes(0) |
89 | , planeCount(0) |
90 | , pixelFormat(format) |
91 | , fieldType(QVideoFrame::ProgressiveFrame) |
92 | , buffer(nullptr) |
93 | , mappedCount(0) |
94 | { |
95 | memset(data, 0, sizeof(data)); |
96 | memset(bytesPerLine, 0, sizeof(bytesPerLine)); |
97 | } |
98 | |
99 | ~QVideoFramePrivate() |
100 | { |
101 | if (buffer) |
102 | buffer->release(); |
103 | } |
104 | |
105 | QSize size; |
106 | qint64 startTime; |
107 | qint64 endTime; |
108 | uchar *data[4]; |
109 | int bytesPerLine[4]; |
110 | int mappedBytes; |
111 | int planeCount; |
112 | QVideoFrame::PixelFormat pixelFormat; |
113 | QVideoFrame::FieldType fieldType; |
114 | QAbstractVideoBuffer *buffer; |
115 | int mappedCount; |
116 | QMutex mapMutex; |
117 | QVariantMap metadata; |
118 | |
119 | private: |
120 | Q_DISABLE_COPY(QVideoFramePrivate) |
121 | }; |
122 | |
123 | /*! |
124 | \class QVideoFrame |
125 | \brief The QVideoFrame class represents a frame of video data. |
126 | \inmodule QtMultimedia |
127 | |
128 | \ingroup multimedia |
129 | \ingroup multimedia_video |
130 | |
131 | A QVideoFrame encapsulates the pixel data of a video frame, and information about the frame. |
132 | |
133 | Video frames can come from several places - decoded \l {QMediaPlayer}{media}, a |
134 | \l {QCamera}{camera}, or generated programmatically. The way pixels are described in these |
135 | frames can vary greatly, and some pixel formats offer greater compression opportunities at |
136 | the expense of ease of use. |
137 | |
138 | The pixel contents of a video frame can be mapped to memory using the map() function. While |
139 | mapped, the video data can be accessed using the bits() function, which returns a pointer to a |
140 | buffer. The total size of this buffer is given by the mappedBytes() function, and the size of |
141 | each line is given by bytesPerLine(). The return value of the handle() function may also be |
142 | used to access frame data using the internal buffer's native APIs (for example - an OpenGL |
143 | texture handle). |
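
For example, a minimal sketch of reading the mapped pixel data of a frame (the frame source
and the per-line processing are left as placeholders, and error handling is elided):

\code
QVideoFrame frame = ...; // e.g. a frame delivered to QAbstractVideoSurface::present()
if (frame.map(QAbstractVideoBuffer::ReadOnly)) {
    const uchar *line = frame.bits();         // start of the first scan line
    const int stride = frame.bytesPerLine();  // bytes per scan line
    for (int y = 0; y < frame.height(); ++y) {
        // ... inspect the pixels in 'line' here ...
        line += stride;
    }
    frame.unmap();                            // release the mapping
}
\endcode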
144 | |
145 | A video frame can also have timestamp information associated with it. These timestamps can be |
146 | used by an implementation of \l QAbstractVideoSurface to determine when to start and stop |
147 | displaying the frame, although not all surfaces respect this setting. |
148 | |
149 | The video pixel data in a QVideoFrame is encapsulated in a QAbstractVideoBuffer. A QVideoFrame |
150 | may be constructed from any buffer type by subclassing the QAbstractVideoBuffer class. |
151 | |
152 | \note Since video frames can be expensive to copy, QVideoFrame is explicitly shared, so any |
153 | change made to a video frame will also apply to any copies. |
154 | */ |
155 | |
156 | /*! |
157 | \enum QVideoFrame::PixelFormat |
158 | |
159 | Enumerates video data types. |
160 | |
161 | \value Format_Invalid |
162 | The frame is invalid. |
163 | |
164 | \value Format_ARGB32 |
165 | The frame is stored using a 32-bit ARGB format (0xAARRGGBB). This is equivalent to |
166 | QImage::Format_ARGB32. |
167 | |
168 | \value Format_ARGB32_Premultiplied |
169 | The frame is stored using a premultiplied 32-bit ARGB format (0xAARRGGBB). This is equivalent |
170 | to QImage::Format_ARGB32_Premultiplied. |
171 | |
172 | \value Format_RGB32 |
173 | The frame is stored using a 32-bit RGB format (0xffRRGGBB). This is equivalent to |
174 | QImage::Format_RGB32. |
175 | |
176 | \value Format_RGB24 |
177 | The frame is stored using a 24-bit RGB format (8-8-8). This is equivalent to |
178 | QImage::Format_RGB888. |
179 | |
180 | \value Format_RGB565 |
181 | The frame is stored using a 16-bit RGB format (5-6-5). This is equivalent to |
182 | QImage::Format_RGB16. |
183 | |
184 | \value Format_RGB555 |
185 | The frame is stored using a 16-bit RGB format (5-5-5). This is equivalent to |
186 | QImage::Format_RGB555. |
187 | |
188 | \value Format_ARGB8565_Premultiplied |
189 | The frame is stored using a 24-bit premultiplied ARGB format (8-5-6-5). |
190 | |
191 | \value Format_BGRA32 |
192 | The frame is stored using a 32-bit BGRA format (0xBBGGRRAA). |
193 | |
194 | \value Format_BGRA32_Premultiplied |
195 | The frame is stored using a premultiplied 32-bit BGRA format. |
196 | |
197 | \value Format_ABGR32 |
198 | The frame is stored using a 32-bit ABGR format (0xAABBGGRR). |
199 | |
200 | \value Format_BGR32 |
201 | The frame is stored using a 32-bit BGR format (0xBBGGRRff). |
202 | |
203 | \value Format_BGR24 |
204 | The frame is stored using a 24-bit BGR format (0xBBGGRR). |
205 | |
206 | \value Format_BGR565 |
207 | The frame is stored using a 16-bit BGR format (5-6-5). |
208 | |
209 | \value Format_BGR555 |
210 | The frame is stored using a 16-bit BGR format (5-5-5). |
211 | |
212 | \value Format_BGRA5658_Premultiplied |
213 | The frame is stored using a 24-bit premultiplied BGRA format (5-6-5-8). |
214 | |
215 | \value Format_AYUV444 |
216 | The frame is stored using a packed 32-bit AYUV format (0xAAYYUUVV). |
217 | |
218 | \value Format_AYUV444_Premultiplied |
219 | The frame is stored using a packed premultiplied 32-bit AYUV format (0xAAYYUUVV). |
220 | |
221 | \value Format_YUV444 |
222 | The frame is stored using a 24-bit packed YUV format (8-8-8). |
223 | |
224 | \value Format_YUV420P |
225 | The frame is stored using an 8-bit per component planar YUV format with the U and V planes |
226 | horizontally and vertically sub-sampled, i.e. the height and width of the U and V planes are |
227 | half that of the Y plane. |
228 | |
229 | \value Format_YUV422P |
230 | The frame is stored using an 8-bit per component planar YUV format with the U and V planes |
231 | horizontally sub-sampled, i.e. the width of the U and V planes is |
232 | half that of the Y plane, and the height of the U and V planes is the same as that of the Y plane. |
233 | |
234 | \value Format_YV12 |
235 | The frame is stored using an 8-bit per component planar YVU format with the V and U planes |
236 | horizontally and vertically sub-sampled, i.e. the height and width of the V and U planes are |
237 | half that of the Y plane. |
238 | |
239 | \value Format_UYVY |
240 | The frame is stored using an 8-bit per component packed YUV format with the U and V planes |
241 | horizontally sub-sampled (U-Y-V-Y), i.e. two horizontally adjacent pixels are stored as a 32-bit |
242 | macropixel which has a Y value for each pixel and common U and V values. |
243 | |
244 | \value Format_YUYV |
245 | The frame is stored using an 8-bit per component packed YUV format with the U and V planes |
246 | horizontally sub-sampled (Y-U-Y-V), i.e. two horizontally adjacent pixels are stored as a 32-bit |
247 | macropixel which has a Y value for each pixel and common U and V values. |
248 | |
249 | \value Format_NV12 |
250 | The frame is stored using an 8-bit per component semi-planar YUV format with a Y plane (Y) |
251 | followed by a horizontally and vertically sub-sampled, packed UV plane (U-V). |
252 | |
253 | \value Format_NV21 |
254 | The frame is stored using an 8-bit per component semi-planar YUV format with a Y plane (Y) |
255 | followed by a horizontally and vertically sub-sampled, packed VU plane (V-U). |
256 | |
257 | \value Format_IMC1 |
258 | The frame is stored using an 8-bit per component planar YUV format with the U and V planes |
259 | horizontally and vertically sub-sampled. This is similar to the Format_YUV420P type, except |
260 | that the bytes per line of the U and V planes are padded out to the same stride as the Y plane. |
261 | |
262 | \value Format_IMC2 |
263 | The frame is stored using an 8-bit per component planar YUV format with the U and V planes |
264 | horizontally and vertically sub-sampled. This is similar to the Format_YUV420P type, except |
265 | that the lines of the U and V planes are interleaved, i.e. each line of U data is followed by a |
266 | line of V data creating a single line of the same stride as the Y data. |
267 | |
268 | \value Format_IMC3 |
269 | The frame is stored using an 8-bit per component planar YVU format with the V and U planes |
270 | horizontally and vertically sub-sampled. This is similar to the Format_YV12 type, except that |
271 | the bytes per line of the V and U planes are padded out to the same stride as the Y plane. |
272 | |
273 | \value Format_IMC4 |
274 | The frame is stored using an 8-bit per component planar YVU format with the V and U planes |
275 | horizontally and vertically sub-sampled. This is similar to the Format_YV12 type, except that |
276 | the lines of the V and U planes are interleaved, i.e. each line of V data is followed by a line |
277 | of U data creating a single line of the same stride as the Y data. |
278 | |
279 | \value Format_Y8 |
280 | The frame is stored using an 8-bit greyscale format. |
281 | |
282 | \value Format_Y16 |
283 | The frame is stored using a 16-bit linear greyscale format. Little endian. |
284 | |
285 | \value Format_Jpeg |
286 | The frame is stored in compressed Jpeg format. |
287 | |
288 | \value Format_CameraRaw |
289 | The frame is stored using a device specific camera raw format. |
290 | |
291 | \value Format_AdobeDng |
292 | The frame is stored using raw Adobe Digital Negative (DNG) format. |
293 | |
294 | \value Format_User |
295 | Start value for user defined pixel formats. |
296 | */ |
297 | |
298 | /*! |
299 | \enum QVideoFrame::FieldType |
300 | |
301 | Specifies the field an interlaced video frame belongs to. |
302 | |
303 | \value ProgressiveFrame The frame is not interlaced. |
304 | \value TopField The frame contains a top field. |
305 | \value BottomField The frame contains a bottom field. |
306 | \value InterlacedFrame The frame contains a merged top and bottom field. |
307 | */ |
308 | |
309 | /*! |
310 | Constructs a null video frame. |
311 | */ |
312 | QVideoFrame::QVideoFrame() |
313 | : d(new QVideoFramePrivate) |
314 | { |
315 | } |
316 | |
317 | /*! |
318 | Constructs a video frame from a \a buffer with the given pixel \a format and \a size in pixels. |
319 | |
320 | \note This doesn't increment the reference count of the video buffer. |
321 | */ |
322 | QVideoFrame::QVideoFrame( |
323 | QAbstractVideoBuffer *buffer, const QSize &size, PixelFormat format) |
324 | : d(new QVideoFramePrivate(size, format)) |
325 | { |
326 | d->buffer = buffer; |
327 | } |
328 | |
329 | /*! |
330 | Constructs a video frame of the given pixel \a format and \a size in pixels. |
331 | |
332 | The \a bytesPerLine (stride) is the length of each scan line in bytes, and \a bytes is the total |
333 | number of bytes that must be allocated for the frame. |
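
For example, a minimal sketch that allocates an uninitialized 640 x 480 RGB32 frame
(each RGB32 pixel occupies 4 bytes, so the stride is the width times 4 and the total
size is the stride times the height):

\code
const QSize size(640, 480);
const int stride = size.width() * 4;       // 4 bytes per RGB32 pixel
const int bytes = stride * size.height();  // total bytes to allocate
QVideoFrame frame(bytes, size, stride, QVideoFrame::Format_RGB32);
\endcode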
334 | */ |
335 | QVideoFrame::QVideoFrame(int bytes, const QSize &size, int bytesPerLine, PixelFormat format) |
336 | : d(new QVideoFramePrivate(size, format)) |
337 | { |
338 | if (bytes > 0) { |
339 | QByteArray data; |
340 | data.resize(bytes); |
341 | |
342 | // Check the memory was successfully allocated. |
343 | if (!data.isEmpty()) |
344 | d->buffer = new QMemoryVideoBuffer(data, bytesPerLine); |
345 | } |
346 | } |
347 | |
348 | /*! |
349 | Constructs a video frame from an \a image. |
350 | |
351 | \note This will construct an invalid video frame if there is no frame type equivalent to the |
352 | image format. |
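
For example, a minimal sketch that converts the image to a supported format before wrapping
it in a frame (the source image and the fallback format chosen here are only illustrative):

\code
QImage image = ...; // any source image
if (QVideoFrame::pixelFormatFromImageFormat(image.format()) == QVideoFrame::Format_Invalid)
    image = image.convertToFormat(QImage::Format_ARGB32);
QVideoFrame frame(image);
\endcode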
353 | |
354 | \sa pixelFormatFromImageFormat() |
355 | */ |
356 | QVideoFrame::QVideoFrame(const QImage &image) |
357 | : d(new QVideoFramePrivate( |
358 | image.size(), pixelFormatFromImageFormat(image.format()))) |
359 | { |
360 | if (d->pixelFormat != Format_Invalid) |
361 | d->buffer = new QImageVideoBuffer(image); |
362 | } |
363 | |
364 | /*! |
365 | Constructs a shallow copy of \a other. Since QVideoFrame is |
366 | explicitly shared, these two instances will reflect the same frame. |
367 | |
368 | */ |
369 | QVideoFrame::QVideoFrame(const QVideoFrame &other) |
370 | : d(other.d) |
371 | { |
372 | } |
373 | |
374 | /*! |
375 | Assigns the contents of \a other to this video frame. Since QVideoFrame is |
376 | explicitly shared, these two instances will reflect the same frame. |
377 | |
378 | */ |
379 | QVideoFrame &QVideoFrame::operator =(const QVideoFrame &other) |
380 | { |
381 | d = other.d; |
382 | |
383 | return *this; |
384 | } |
385 | |
386 | /*! |
387 | \return \c true if this QVideoFrame and \a other reflect the same frame. |
388 | */ |
389 | bool QVideoFrame::operator==(const QVideoFrame &other) const |
390 | { |
391 | // Due to explicit sharing we just compare the QSharedData which in turn compares the pointers. |
392 | return d == other.d; |
393 | } |
394 | |
395 | /*! |
396 | \return \c true if this QVideoFrame and \a other do not reflect the same frame. |
397 | */ |
398 | bool QVideoFrame::operator!=(const QVideoFrame &other) const |
399 | { |
400 | return d != other.d; |
401 | } |
402 | |
403 | /*! |
404 | Destroys a video frame. |
405 | */ |
406 | QVideoFrame::~QVideoFrame() |
407 | { |
408 | } |
409 | |
410 | /*! |
411 | \return underlying video buffer or \c null if there is none. |
412 | \since 5.13 |
413 | */ |
414 | QAbstractVideoBuffer *QVideoFrame::buffer() const |
415 | { |
416 | return d->buffer; |
417 | } |
418 | |
419 | /*! |
420 | Identifies whether a video frame is valid. |
421 | |
422 | An invalid frame has no video buffer associated with it. |
423 | |
424 | Returns true if the frame is valid, and false if it is not. |
425 | */ |
426 | bool QVideoFrame::isValid() const |
427 | { |
428 | return d->buffer != nullptr; |
429 | } |
430 | |
431 | /*! |
432 | Returns the color format of a video frame. |
433 | */ |
434 | QVideoFrame::PixelFormat QVideoFrame::pixelFormat() const |
435 | { |
436 | return d->pixelFormat; |
437 | } |
438 | |
439 | /*! |
440 | Returns the type of a video frame's handle. |
441 | |
442 | */ |
443 | QAbstractVideoBuffer::HandleType QVideoFrame::handleType() const |
444 | { |
445 | return d->buffer ? d->buffer->handleType() : QAbstractVideoBuffer::NoHandle; |
446 | } |
447 | |
448 | /*! |
449 | Returns the dimensions of a video frame. |
450 | */ |
451 | QSize QVideoFrame::size() const |
452 | { |
453 | return d->size; |
454 | } |
455 | |
456 | /*! |
457 | Returns the width of a video frame. |
458 | */ |
459 | int QVideoFrame::width() const |
460 | { |
461 | return d->size.width(); |
462 | } |
463 | |
464 | /*! |
465 | Returns the height of a video frame. |
466 | */ |
467 | int QVideoFrame::height() const |
468 | { |
469 | return d->size.height(); |
470 | } |
471 | |
472 | /*! |
473 | Returns the field an interlaced video frame belongs to. |
474 | |
475 | If the video is not interlaced this will return ProgressiveFrame. |
476 | */ |
477 | QVideoFrame::FieldType QVideoFrame::fieldType() const |
478 | { |
479 | return d->fieldType; |
480 | } |
481 | |
482 | /*! |
483 | Sets the \a field an interlaced video frame belongs to. |
484 | */ |
485 | void QVideoFrame::setFieldType(QVideoFrame::FieldType field) |
486 | { |
487 | d->fieldType = field; |
488 | } |
489 | |
490 | /*! |
491 | Identifies if a video frame's contents are currently mapped to system memory. |
492 | |
493 | This is a convenience function which checks that the \l {QAbstractVideoBuffer::MapMode}{MapMode} |
494 | of the frame is not equal to QAbstractVideoBuffer::NotMapped. |
495 | |
496 | Returns true if the contents of the video frame are mapped to system memory, and false |
497 | otherwise. |
498 | |
499 | \sa mapMode(), QAbstractVideoBuffer::MapMode |
500 | */ |
501 | |
502 | bool QVideoFrame::isMapped() const |
503 | { |
504 | return d->buffer != nullptr && d->buffer->mapMode() != QAbstractVideoBuffer::NotMapped; |
505 | } |
506 | |
507 | /*! |
508 | Identifies if the mapped contents of a video frame will be persisted when the frame is unmapped. |
509 | |
510 | This is a convenience function which checks if the \l {QAbstractVideoBuffer::MapMode}{MapMode} |
511 | contains the QAbstractVideoBuffer::WriteOnly flag. |
512 | |
513 | Returns true if the video frame will be updated when unmapped, and false otherwise. |
514 | |
515 | \note The result of altering the data of a frame that is mapped in read-only mode is undefined. |
516 | Depending on the buffer implementation the changes may be persisted, or worse alter a shared |
517 | buffer. |
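
For example, a minimal sketch that only modifies the pixel data when the mapping is
writable (the actual modification is left as a placeholder):

\code
if (frame.map(QAbstractVideoBuffer::ReadWrite)) {
    if (frame.isWritable()) {
        uchar *pixels = frame.bits();
        // ... write to 'pixels' here ...
    }
    frame.unmap(); // persists the changes if the mapping was writable
}
\endcode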
518 | |
519 | \sa mapMode(), QAbstractVideoBuffer::MapMode |
520 | */ |
521 | bool QVideoFrame::isWritable() const |
522 | { |
523 | return d->buffer != nullptr && (d->buffer->mapMode() & QAbstractVideoBuffer::WriteOnly); |
524 | } |
525 | |
526 | /*! |
527 | Identifies if the mapped contents of a video frame were read from the frame when it was mapped. |
528 | |
529 | This is a convenience function which checks if the \l {QAbstractVideoBuffer::MapMode}{MapMode} |
530 | contains the QAbstractVideoBuffer::ReadOnly flag. |
531 | |
532 | Returns true if the contents of the mapped memory were read from the video frame, and false |
533 | otherwise. |
534 | |
535 | \sa mapMode(), QAbstractVideoBuffer::MapMode |
536 | */ |
537 | bool QVideoFrame::isReadable() const |
538 | { |
539 | return d->buffer != nullptr && (d->buffer->mapMode() & QAbstractVideoBuffer::ReadOnly); |
540 | } |
541 | |
542 | /*! |
543 | Returns the mode in which a video frame was mapped to system memory. |
544 | |
545 | \sa map(), QAbstractVideoBuffer::MapMode |
546 | */ |
547 | QAbstractVideoBuffer::MapMode QVideoFrame::mapMode() const |
548 | { |
549 | return d->buffer != nullptr ? d->buffer->mapMode() : QAbstractVideoBuffer::NotMapped; |
550 | } |
551 | |
552 | /*! |
553 | Maps the contents of a video frame to system (CPU addressable) memory. |
554 | |
555 | In some cases the video frame data might be stored in video memory or otherwise inaccessible |
556 | memory, so it is necessary to map a frame before accessing the pixel data. This may involve |
557 | copying the contents around, so avoid mapping and unmapping unless required. |
558 | |
559 | The map \a mode indicates whether the contents of the mapped memory should be read from and/or |
560 | written to the frame. If the map mode includes the \c QAbstractVideoBuffer::ReadOnly flag the |
561 | mapped memory will be populated with the content of the video frame when initially mapped. If the map |
562 | mode includes the \c QAbstractVideoBuffer::WriteOnly flag the content of the possibly modified |
563 | mapped memory will be written back to the frame when unmapped. |
564 | |
565 | While mapped the contents of a video frame can be accessed directly through the pointer returned |
566 | by the bits() function. |
567 | |
568 | When access to the data is no longer needed, be sure to call the unmap() function to release the |
569 | mapped memory and possibly update the video frame contents. |
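
For example, a minimal sketch that fills a frame backed by its own memory buffer (the
frame dimensions and the fill value are arbitrary):

\code
QVideoFrame frame(4 * 64 * 64, QSize(64, 64), 4 * 64, QVideoFrame::Format_ARGB32);
if (frame.map(QAbstractVideoBuffer::WriteOnly)) {
    uchar *data = frame.bits();
    for (int i = 0; i < frame.mappedBytes(); ++i)
        data[i] = 0;   // clear every mapped byte
    frame.unmap();     // write the data back to the frame
}
\endcode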
570 | |
571 | If the video frame has been mapped in read only mode, it is permissible to map it |
572 | multiple times in read only mode (and unmap it a corresponding number of times). In all |
573 | other cases it is necessary to unmap the frame first before mapping a second time. |
574 | |
575 | \note Writing to memory that is mapped as read-only is undefined, and may result in changes |
576 | to shared data or crashes. |
577 | |
578 | Returns true if the frame was mapped to memory in the given \a mode and false otherwise. |
579 | |
580 | \sa unmap(), mapMode(), bits() |
581 | */ |
582 | bool QVideoFrame::map(QAbstractVideoBuffer::MapMode mode) |
583 | { |
584 | QMutexLocker lock(&d->mapMutex); |
585 | |
586 | if (!d->buffer) |
587 | return false; |
588 | |
589 | if (mode == QAbstractVideoBuffer::NotMapped) |
590 | return false; |
591 | |
592 | if (d->mappedCount > 0) { |
593 | //it's allowed to map the video frame multiple times in read only mode |
594 | if (d->buffer->mapMode() == QAbstractVideoBuffer::ReadOnly |
595 | && mode == QAbstractVideoBuffer::ReadOnly) { |
596 | d->mappedCount++; |
597 | return true; |
598 | } else { |
599 | return false; |
600 | } |
601 | } |
602 | |
603 | Q_ASSERT(d->data[0] == nullptr); |
604 | Q_ASSERT(d->bytesPerLine[0] == 0); |
605 | Q_ASSERT(d->planeCount == 0); |
606 | Q_ASSERT(d->mappedBytes == 0); |
607 | |
608 | d->planeCount = d->buffer->mapPlanes(mode, &d->mappedBytes, d->bytesPerLine, d->data); |
609 | if (d->planeCount == 0) |
610 | return false; |
611 | |
612 | if (d->planeCount > 1) { |
613 | // The buffer mapped all of its planes itself; there is nothing more to derive. |
614 | } else switch (d->pixelFormat) { |
615 | case Format_Invalid: |
616 | case Format_ARGB32: |
617 | case Format_ARGB32_Premultiplied: |
618 | case Format_RGB32: |
619 | case Format_RGB24: |
620 | case Format_RGB565: |
621 | case Format_RGB555: |
622 | case Format_ARGB8565_Premultiplied: |
623 | case Format_BGRA32: |
624 | case Format_BGRA32_Premultiplied: |
625 | case Format_ABGR32: |
626 | case Format_BGR32: |
627 | case Format_BGR24: |
628 | case Format_BGR565: |
629 | case Format_BGR555: |
630 | case Format_BGRA5658_Premultiplied: |
631 | case Format_AYUV444: |
632 | case Format_AYUV444_Premultiplied: |
633 | case Format_YUV444: |
634 | case Format_UYVY: |
635 | case Format_YUYV: |
636 | case Format_Y8: |
637 | case Format_Y16: |
638 | case Format_Jpeg: |
639 | case Format_CameraRaw: |
640 | case Format_AdobeDng: |
641 | case Format_User: |
642 | // Single plane or opaque format. |
643 | break; |
644 | case Format_YUV420P: |
645 | case Format_YUV422P: |
646 | case Format_YV12: { |
647 | // The UV stride is usually half the Y stride and is 32-bit aligned. |
648 | // However it's not always the case, at least on Windows where the |
649 | // UV planes are sometimes not aligned. |
650 | // We calculate the stride using the UV byte count to always |
651 | // have a correct stride. |
652 | const int height = d->size.height(); |
653 | const int yStride = d->bytesPerLine[0]; |
654 | const int uvHeight = d->pixelFormat == Format_YUV422P ? height : height / 2; |
655 | const int uvStride = (d->mappedBytes - (yStride * height)) / uvHeight / 2; |
656 | |
657 | // Three planes, the second and third vertically (and horizontally for other than Format_YUV422P formats) subsampled. |
658 | d->planeCount = 3; |
659 | d->bytesPerLine[2] = d->bytesPerLine[1] = uvStride; |
660 | d->data[1] = d->data[0] + (yStride * height); |
661 | d->data[2] = d->data[1] + (uvStride * uvHeight); |
662 | break; |
663 | } |
664 | case Format_NV12: |
665 | case Format_NV21: |
666 | case Format_IMC2: |
667 | case Format_IMC4: { |
668 | // Semi planar, Full resolution Y plane with interleaved subsampled U and V planes. |
669 | d->planeCount = 2; |
670 | d->bytesPerLine[1] = d->bytesPerLine[0]; |
671 | d->data[1] = d->data[0] + (d->bytesPerLine[0] * d->size.height()); |
672 | break; |
673 | } |
674 | case Format_IMC1: |
675 | case Format_IMC3: { |
676 | // Three planes, the second and third vertically and horizontally subsampled, |
677 | // but with lines padded to the width of the first plane. |
678 | d->planeCount = 3; |
679 | d->bytesPerLine[2] = d->bytesPerLine[1] = d->bytesPerLine[0]; |
680 | d->data[1] = d->data[0] + (d->bytesPerLine[0] * d->size.height()); |
681 | d->data[2] = d->data[1] + (d->bytesPerLine[1] * d->size.height() / 2); |
682 | break; |
683 | } |
684 | default: |
685 | break; |
686 | } |
687 | |
688 | d->mappedCount++; |
689 | return true; |
690 | } |
691 | |
692 | /*! |
693 | Releases the memory mapped by the map() function. |
694 | |
695 | If the \l {QAbstractVideoBuffer::MapMode}{MapMode} included the QAbstractVideoBuffer::WriteOnly |
696 | flag this will persist the current content of the mapped memory to the video frame. |
697 | |
698 | unmap() should not be called if the map() function failed. |
699 | |
700 | \sa map() |
701 | */ |
702 | void QVideoFrame::unmap() |
703 | { |
704 | QMutexLocker lock(&d->mapMutex); |
705 | |
706 | if (!d->buffer) |
707 | return; |
708 | |
709 | if (d->mappedCount == 0) { |
710 | qWarning() << "QVideoFrame::unmap() was called more times than QVideoFrame::map()"; |
711 | return; |
712 | } |
713 | |
714 | d->mappedCount--; |
715 | |
716 | if (d->mappedCount == 0) { |
717 | d->mappedBytes = 0; |
718 | d->planeCount = 0; |
719 | memset(d->bytesPerLine, 0, sizeof(d->bytesPerLine)); |
720 | memset(d->data, 0, sizeof(d->data)); |
721 | |
722 | d->buffer->unmap(); |
723 | } |
724 | } |
725 | |
726 | /*! |
727 | Returns the number of bytes in a scan line. |
728 | |
729 | \note For planar formats this is the bytes per line of the first plane only. The bytes per line of subsequent |
730 | planes should be calculated as per the frame \l{QVideoFrame::PixelFormat}{pixel format}. |
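
For example, a minimal sketch that instead walks every mapped plane with the plane-aware
overloads (the per-plane processing is left as a placeholder):

\code
if (frame.map(QAbstractVideoBuffer::ReadOnly)) {
    for (int plane = 0; plane < frame.planeCount(); ++plane) {
        const uchar *data = frame.bits(plane);
        const int stride = frame.bytesPerLine(plane);
        // ... read the plane line by line using 'stride' ...
    }
    frame.unmap();
}
\endcode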
731 | |
732 | This value is only valid while the frame data is \l {map()}{mapped}. |
733 | |
734 | \sa bits(), map(), mappedBytes() |
735 | */ |
736 | int QVideoFrame::bytesPerLine() const |
737 | { |
738 | return d->bytesPerLine[0]; |
739 | } |
740 | |
741 | /*! |
742 | Returns the number of bytes in a scan line of a \a plane. |
743 | |
744 | This value is only valid while the frame data is \l {map()}{mapped}. |
745 | |
746 | \sa bits(), map(), mappedBytes(), planeCount() |
747 | \since 5.4 |
748 | */ |
749 | |
750 | int QVideoFrame::bytesPerLine(int plane) const |
751 | { |
752 | return plane >= 0 && plane < d->planeCount ? d->bytesPerLine[plane] : 0; |
753 | } |
754 | |
755 | /*! |
756 | Returns a pointer to the start of the frame data buffer. |
757 | |
758 | This value is only valid while the frame data is \l {map()}{mapped}. |
759 | |
760 | Changes made to data accessed via this pointer (when mapped with write access) |
761 | are only guaranteed to have been persisted when unmap() is called and when the |
762 | buffer has been mapped for writing. |
763 | |
764 | \sa map(), mappedBytes(), bytesPerLine() |
765 | */ |
766 | uchar *QVideoFrame::bits() |
767 | { |
768 | return d->data[0]; |
769 | } |
770 | |
771 | /*! |
772 | Returns a pointer to the start of the frame data buffer for a \a plane. |
773 | |
774 | This value is only valid while the frame data is \l {map()}{mapped}. |
775 | |
776 | Changes made to data accessed via this pointer (when mapped with write access) |
777 | are only guaranteed to have been persisted when unmap() is called and when the |
778 | buffer has been mapped for writing. |
779 | |
780 | \sa map(), mappedBytes(), bytesPerLine(), planeCount() |
781 | \since 5.4 |
782 | */ |
783 | uchar *QVideoFrame::bits(int plane) |
784 | { |
785 | return plane >= 0 && plane < d->planeCount ? d->data[plane] : nullptr; |
786 | } |
787 | |
788 | /*! |
789 | Returns a pointer to the start of the frame data buffer. |
790 | |
791 | This value is only valid while the frame data is \l {map()}{mapped}. |
792 | |
793 | If the buffer was not mapped with read access, the contents of this |
794 | buffer will initially be uninitialized. |
795 | |
796 | \sa map(), mappedBytes(), bytesPerLine() |
797 | */ |
798 | const uchar *QVideoFrame::bits() const |
799 | { |
800 | return d->data[0]; |
801 | } |
802 | |
803 | /*! |
804 | Returns a pointer to the start of the frame data buffer for a \a plane. |
805 | |
806 | This value is only valid while the frame data is \l {map()}{mapped}. |
807 | |
808 | If the buffer was not mapped with read access, the contents of this |
809 | buffer will initially be uninitialized. |
810 | |
811 | \sa map(), mappedBytes(), bytesPerLine(), planeCount() |
812 | \since 5.4 |
813 | */ |
814 | const uchar *QVideoFrame::bits(int plane) const |
815 | { |
816 | return plane >= 0 && plane < d->planeCount ? d->data[plane] : nullptr; |
817 | } |
818 | |
819 | /*! |
820 | Returns the number of bytes occupied by the mapped frame data. |
821 | |
822 | This value is only valid while the frame data is \l {map()}{mapped}. |
823 | |
824 | \sa map() |
825 | */ |
826 | int QVideoFrame::mappedBytes() const |
827 | { |
828 | return d->mappedBytes; |
829 | } |
830 | |
831 | /*! |
832 | Returns the number of planes in the video frame. |
833 | |
834 | This value is only valid while the frame data is \l {map()}{mapped}. |
835 | |
836 | \sa map() |
837 | \since 5.4 |
838 | */ |
839 | |
840 | int QVideoFrame::planeCount() const |
841 | { |
842 | return d->planeCount; |
843 | } |
844 | |
845 | /*! |
846 | Returns a type specific handle to a video frame's buffer. |
847 | |
848 | For an OpenGL texture this would be the texture ID. |
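
For example, a minimal sketch that retrieves a texture ID when the underlying buffer
exposes one (binding and drawing with the texture is outside the scope of this class):

\code
if (frame.handleType() == QAbstractVideoBuffer::GLTextureHandle) {
    const uint textureId = frame.handle().toUInt();
    // ... bind 'textureId' in the current OpenGL context ...
}
\endcode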
849 | |
850 | \sa QAbstractVideoBuffer::handle() |
851 | */ |
852 | QVariant QVideoFrame::handle() const |
853 | { |
854 | return d->buffer != nullptr ? d->buffer->handle() : QVariant(); |
855 | } |
856 | |
857 | /*! |
858 | Returns the presentation time (in microseconds) when the frame should be displayed. |
859 | |
860 | An invalid time is represented as -1. |
861 | |
862 | */ |
863 | qint64 QVideoFrame::startTime() const |
864 | { |
865 | return d->startTime; |
866 | } |
867 | |
868 | /*! |
869 | Sets the presentation \a time (in microseconds) when the frame should initially be displayed. |
870 | |
871 | An invalid time is represented as -1. |
872 | |
873 | */ |
874 | void QVideoFrame::setStartTime(qint64 time) |
875 | { |
876 | d->startTime = time; |
877 | } |
878 | |
879 | /*! |
880 | Returns the presentation time (in microseconds) when a frame should stop being displayed. |
881 | |
882 | An invalid time is represented as -1. |
883 | |
884 | */ |
885 | qint64 QVideoFrame::endTime() const |
886 | { |
887 | return d->endTime; |
888 | } |
889 | |
890 | /*! |
891 | Sets the presentation \a time (in microseconds) when a frame should stop being displayed. |
892 | |
893 | An invalid time is represented as -1. |
894 | |
895 | */ |
896 | void QVideoFrame::setEndTime(qint64 time) |
897 | { |
898 | d->endTime = time; |
899 | } |
900 | |
901 | /*! |
902 | Returns any extra metadata associated with this frame. |
903 | */ |
904 | QVariantMap QVideoFrame::availableMetaData() const |
905 | { |
906 | return d->metadata; |
907 | } |
908 | |
909 | /*! |
910 | Returns any metadata for this frame for the given \a key. |
911 | |
912 | This might include frame specific information from |
913 | a camera, or subtitles from a decoded video stream. |
914 | |
915 | See the documentation for the relevant video frame |
916 | producer for further information about available metadata. |
917 | */ |
918 | QVariant QVideoFrame::metaData(const QString &key) const |
919 | { |
920 | return d->metadata.value(key); |
921 | } |
922 | |
923 | /*! |
924 | Sets the metadata for the given \a key to \a value. |
925 | |
926 | If \a value is a null variant, any metadata for this key will be removed. |
927 | |
928 | The producer of the video frame might use this to associate |
929 | certain data with this frame, or for an intermediate processor |
930 | to add information for a consumer of this frame. |
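
For example, a minimal sketch that attaches a value, reads it back and then removes it
again (the key name is arbitrary and only used for illustration):

\code
frame.setMetaData(QStringLiteral("Gain"), 2.5);          // attach a value
QVariant gain = frame.metaData(QStringLiteral("Gain"));  // read it back
frame.setMetaData(QStringLiteral("Gain"), QVariant());   // a null variant removes it
\endcode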
931 | */ |
932 | void QVideoFrame::setMetaData(const QString &key, const QVariant &value) |
933 | { |
934 | if (!value.isNull()) |
935 | d->metadata.insert(key, value); |
936 | else |
937 | d->metadata.remove(key); |
938 | } |
939 | |
940 | /*! |
941 | Returns a video pixel format equivalent to an image \a format. If there is no equivalent |
942 | format QVideoFrame::Format_Invalid is returned instead. |
943 | |
944 | \note In general \l QImage does not handle YUV formats. |
945 | |
946 | */ |
947 | QVideoFrame::PixelFormat QVideoFrame::pixelFormatFromImageFormat(QImage::Format format) |
948 | { |
949 | switch (format) { |
950 | case QImage::Format_RGB32: |
951 | case QImage::Format_RGBX8888: |
952 | return Format_RGB32; |
953 | case QImage::Format_ARGB32: |
954 | case QImage::Format_RGBA8888: |
955 | return Format_ARGB32; |
956 | case QImage::Format_ARGB32_Premultiplied: |
957 | case QImage::Format_RGBA8888_Premultiplied: |
958 | return Format_ARGB32_Premultiplied; |
959 | case QImage::Format_RGB16: |
960 | return Format_RGB565; |
961 | case QImage::Format_ARGB8565_Premultiplied: |
962 | return Format_ARGB8565_Premultiplied; |
963 | case QImage::Format_RGB555: |
964 | return Format_RGB555; |
965 | case QImage::Format_RGB888: |
966 | return Format_RGB24; |
967 | default: |
968 | return Format_Invalid; |
969 | } |
970 | } |
971 | |
972 | /*! |
973 | Returns an image format equivalent to a video frame pixel \a format. If there is no equivalent |
974 | format QImage::Format_Invalid is returned instead. |
975 | |
976 | \note In general \l QImage does not handle YUV formats. |
977 | |
978 | */ |
979 | QImage::Format QVideoFrame::imageFormatFromPixelFormat(PixelFormat format) |
980 | { |
981 | switch (format) { |
982 | case Format_Invalid: |
983 | return QImage::Format_Invalid; |
984 | case Format_ARGB32: |
985 | return QImage::Format_ARGB32; |
986 | case Format_ARGB32_Premultiplied: |
987 | return QImage::Format_ARGB32_Premultiplied; |
988 | case Format_RGB32: |
989 | return QImage::Format_RGB32; |
990 | case Format_RGB24: |
991 | return QImage::Format_RGB888; |
992 | case Format_RGB565: |
993 | return QImage::Format_RGB16; |
994 | case Format_RGB555: |
995 | return QImage::Format_RGB555; |
996 | case Format_ARGB8565_Premultiplied: |
997 | return QImage::Format_ARGB8565_Premultiplied; |
998 | case Format_BGRA32: |
999 | case Format_BGRA32_Premultiplied: |
1000 | case Format_BGR32: |
1001 | case Format_BGR24: |
1002 | return QImage::Format_Invalid; |
1003 | case Format_BGR565: |
1004 | case Format_BGR555: |
1005 | case Format_BGRA5658_Premultiplied: |
1006 | case Format_AYUV444: |
1007 | case Format_AYUV444_Premultiplied: |
1008 | case Format_YUV444: |
1009 | case Format_YUV420P: |
1010 | case Format_YUV422P: |
1011 | case Format_YV12: |
1012 | case Format_UYVY: |
1013 | case Format_YUYV: |
1014 | case Format_NV12: |
1015 | case Format_NV21: |
1016 | case Format_IMC1: |
1017 | case Format_IMC2: |
1018 | case Format_IMC3: |
1019 | case Format_IMC4: |
1020 | case Format_Y8: |
1021 | case Format_Y16: |
1022 | case Format_Jpeg: |
1023 | case Format_CameraRaw: |
1024 | case Format_AdobeDng: |
1025 | return QImage::Format_Invalid; |
1026 | case Format_User: |
1027 | default: |
1028 | return QImage::Format_Invalid; |
1029 | } |
1030 | return QImage::Format_Invalid; |
1031 | } |
1032 | |
1033 | |
1034 | extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32(const QVideoFrame&, uchar*); |
1035 | extern void QT_FASTCALL qt_convert_BGR24_to_ARGB32(const QVideoFrame&, uchar*); |
1036 | extern void QT_FASTCALL qt_convert_BGR565_to_ARGB32(const QVideoFrame&, uchar*); |
1037 | extern void QT_FASTCALL qt_convert_BGR555_to_ARGB32(const QVideoFrame&, uchar*); |
1038 | extern void QT_FASTCALL qt_convert_AYUV444_to_ARGB32(const QVideoFrame&, uchar*); |
1039 | extern void QT_FASTCALL qt_convert_YUV444_to_ARGB32(const QVideoFrame&, uchar*); |
1040 | extern void QT_FASTCALL qt_convert_YUV420P_to_ARGB32(const QVideoFrame&, uchar*); |
1041 | extern void QT_FASTCALL qt_convert_YV12_to_ARGB32(const QVideoFrame&, uchar*); |
1042 | extern void QT_FASTCALL qt_convert_UYVY_to_ARGB32(const QVideoFrame&, uchar*); |
1043 | extern void QT_FASTCALL qt_convert_YUYV_to_ARGB32(const QVideoFrame&, uchar*); |
1044 | extern void QT_FASTCALL qt_convert_NV12_to_ARGB32(const QVideoFrame&, uchar*); |
1045 | extern void QT_FASTCALL qt_convert_NV21_to_ARGB32(const QVideoFrame&, uchar*); |
1046 | |
1047 | static VideoFrameConvertFunc qConvertFuncs[QVideoFrame::NPixelFormats] = { |
1048 | /* Format_Invalid */ nullptr, // Not needed |
1049 | /* Format_ARGB32 */ nullptr, // Not needed |
1050 | /* Format_ARGB32_Premultiplied */ nullptr, // Not needed |
1051 | /* Format_RGB32 */ nullptr, // Not needed |
1052 | /* Format_RGB24 */ nullptr, // Not needed |
1053 | /* Format_RGB565 */ nullptr, // Not needed |
1054 | /* Format_RGB555 */ nullptr, // Not needed |
1055 | /* Format_ARGB8565_Premultiplied */ nullptr, // Not needed |
1056 | /* Format_BGRA32 */ qt_convert_BGRA32_to_ARGB32, |
1057 | /* Format_BGRA32_Premultiplied */ qt_convert_BGRA32_to_ARGB32, |
1058 | /* Format_BGR32 */ qt_convert_BGRA32_to_ARGB32, |
1059 | /* Format_BGR24 */ qt_convert_BGR24_to_ARGB32, |
1060 | /* Format_BGR565 */ qt_convert_BGR565_to_ARGB32, |
1061 | /* Format_BGR555 */ qt_convert_BGR555_to_ARGB32, |
1062 | /* Format_BGRA5658_Premultiplied */ nullptr, |
1063 | /* Format_AYUV444 */ qt_convert_AYUV444_to_ARGB32, |
1064 | /* Format_AYUV444_Premultiplied */ nullptr, |
1065 | /* Format_YUV444 */ qt_convert_YUV444_to_ARGB32, |
1066 | /* Format_YUV420P */ qt_convert_YUV420P_to_ARGB32, |
1067 | /* Format_YV12 */ qt_convert_YV12_to_ARGB32, |
1068 | /* Format_UYVY */ qt_convert_UYVY_to_ARGB32, |
1069 | /* Format_YUYV */ qt_convert_YUYV_to_ARGB32, |
1070 | /* Format_NV12 */ qt_convert_NV12_to_ARGB32, |
1071 | /* Format_NV21 */ qt_convert_NV21_to_ARGB32, |
1072 | /* Format_IMC1 */ nullptr, |
1073 | /* Format_IMC2 */ nullptr, |
1074 | /* Format_IMC3 */ nullptr, |
1075 | /* Format_IMC4 */ nullptr, |
1076 | /* Format_Y8 */ nullptr, |
1077 | /* Format_Y16 */ nullptr, |
1078 | /* Format_Jpeg */ nullptr, // Not needed |
1079 | /* Format_CameraRaw */ nullptr, |
1080 | /* Format_AdobeDng */ nullptr, |
1081 | /* Format_ABGR32 */ nullptr, // ### Qt 6: reorder |
1082 | /* Format_YUV422P */ nullptr, |
1083 | }; |
1084 | |
1085 | static void qInitConvertFuncsAsm() |
1086 | { |
1087 | #ifdef QT_COMPILER_SUPPORTS_SSE2 |
1088 | extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_sse2(const QVideoFrame&, uchar*); |
1089 | if (qCpuHasFeature(SSE2)){ |
1090 | qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_sse2; |
1091 | qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_sse2; |
1092 | qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_sse2; |
1093 | } |
1094 | #endif |
1095 | #ifdef QT_COMPILER_SUPPORTS_SSSE3 |
1096 | extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_ssse3(const QVideoFrame&, uchar*); |
1097 | if (qCpuHasFeature(SSSE3)){ |
1098 | qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_ssse3; |
1099 | qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_ssse3; |
1100 | qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_ssse3; |
1101 | } |
1102 | #endif |
1103 | #ifdef QT_COMPILER_SUPPORTS_AVX2 |
1104 | extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_avx2(const QVideoFrame&, uchar*); |
1105 | if (qCpuHasFeature(AVX2)){ |
1106 | qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_avx2; |
1107 | qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_avx2; |
1108 | qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_avx2; |
1109 | } |
1110 | #endif |
1111 | } |
1112 | |
1113 | /*! |
1114 | Converts the current video frame to a QImage, based on the frame's pixel format. |
1115 | \since 5.15 |
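
For example, a minimal sketch that saves a snapshot of a frame (the file name is arbitrary;
the returned image is null if the frame could not be mapped or converted):

\code
const QImage snapshot = frame.image();
if (!snapshot.isNull())
    snapshot.save(QStringLiteral("snapshot.png"));
\endcode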
1116 | */ |
1117 | QImage QVideoFrame::image() const |
1118 | { |
1119 | QVideoFrame frame = *this; |
1120 | QImage result; |
1121 | |
1122 | if (!frame.isValid() || !frame.map(QAbstractVideoBuffer::ReadOnly)) |
1123 | return result; |
1124 | |
1125 | // Formats supported by QImage don't need conversion |
1126 | QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat()); |
1127 | if (imageFormat != QImage::Format_Invalid) { |
1128 | result = QImage(frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), imageFormat).copy(); |
1129 | } |
1130 | |
1131 | // Load from JPG |
1132 | else if (frame.pixelFormat() == QVideoFrame::Format_Jpeg) { |
1133 | result.loadFromData(frame.bits(), frame.mappedBytes(), "JPG"); |
1134 | } |
1135 | |
1136 | // Need conversion |
1137 | else { |
1138 | static bool initAsmFuncsDone = false; |
1139 | if (!initAsmFuncsDone) { |
1140 | qInitConvertFuncsAsm(); |
1141 | initAsmFuncsDone = true; |
1142 | } |
1143 | VideoFrameConvertFunc convert = qConvertFuncs[frame.pixelFormat()]; |
1144 | if (!convert) { |
1145 | qWarning() << Q_FUNC_INFO << ": unsupported pixel format" << frame.pixelFormat(); |
1146 | } else { |
1147 | result = QImage(frame.width(), frame.height(), QImage::Format_ARGB32); |
1148 | convert(frame, result.bits()); |
1149 | } |
1150 | } |
1151 | |
1152 | frame.unmap(); |
1153 | |
1154 | return result; |
1155 | } |
1156 | |
1157 | #ifndef QT_NO_DEBUG_STREAM |
1158 | QDebug operator<<(QDebug dbg, QVideoFrame::PixelFormat pf) |
1159 | { |
1160 | QDebugStateSaver saver(dbg); |
1161 | dbg.nospace(); |
1162 | switch (pf) { |
1163 | case QVideoFrame::Format_Invalid: |
1164 | return dbg << "Format_Invalid" ; |
1165 | case QVideoFrame::Format_ARGB32: |
1166 | return dbg << "Format_ARGB32" ; |
1167 | case QVideoFrame::Format_ARGB32_Premultiplied: |
1168 | return dbg << "Format_ARGB32_Premultiplied" ; |
1169 | case QVideoFrame::Format_RGB32: |
1170 | return dbg << "Format_RGB32" ; |
1171 | case QVideoFrame::Format_RGB24: |
1172 | return dbg << "Format_RGB24" ; |
1173 | case QVideoFrame::Format_RGB565: |
1174 | return dbg << "Format_RGB565" ; |
1175 | case QVideoFrame::Format_RGB555: |
1176 | return dbg << "Format_RGB555" ; |
1177 | case QVideoFrame::Format_ARGB8565_Premultiplied: |
1178 | return dbg << "Format_ARGB8565_Premultiplied" ; |
1179 | case QVideoFrame::Format_BGRA32: |
1180 | return dbg << "Format_BGRA32" ; |
1181 | case QVideoFrame::Format_BGRA32_Premultiplied: |
1182 | return dbg << "Format_BGRA32_Premultiplied" ; |
1183 | case QVideoFrame::Format_ABGR32: |
1184 | return dbg << "Format_ABGR32" ; |
1185 | case QVideoFrame::Format_BGR32: |
1186 | return dbg << "Format_BGR32" ; |
1187 | case QVideoFrame::Format_BGR24: |
1188 | return dbg << "Format_BGR24" ; |
1189 | case QVideoFrame::Format_BGR565: |
1190 | return dbg << "Format_BGR565" ; |
1191 | case QVideoFrame::Format_BGR555: |
1192 | return dbg << "Format_BGR555" ; |
1193 | case QVideoFrame::Format_BGRA5658_Premultiplied: |
1194 | return dbg << "Format_BGRA5658_Premultiplied" ; |
1195 | case QVideoFrame::Format_AYUV444: |
1196 | return dbg << "Format_AYUV444" ; |
1197 | case QVideoFrame::Format_AYUV444_Premultiplied: |
1198 | return dbg << "Format_AYUV444_Premultiplied" ; |
1199 | case QVideoFrame::Format_YUV444: |
1200 | return dbg << "Format_YUV444" ; |
1201 | case QVideoFrame::Format_YUV420P: |
1202 | return dbg << "Format_YUV420P" ; |
1203 | case QVideoFrame::Format_YUV422P: |
1204 | return dbg << "Format_YUV422P" ; |
1205 | case QVideoFrame::Format_YV12: |
1206 | return dbg << "Format_YV12" ; |
1207 | case QVideoFrame::Format_UYVY: |
1208 | return dbg << "Format_UYVY" ; |
1209 | case QVideoFrame::Format_YUYV: |
1210 | return dbg << "Format_YUYV" ; |
1211 | case QVideoFrame::Format_NV12: |
1212 | return dbg << "Format_NV12" ; |
1213 | case QVideoFrame::Format_NV21: |
1214 | return dbg << "Format_NV21" ; |
1215 | case QVideoFrame::Format_IMC1: |
1216 | return dbg << "Format_IMC1" ; |
1217 | case QVideoFrame::Format_IMC2: |
1218 | return dbg << "Format_IMC2" ; |
1219 | case QVideoFrame::Format_IMC3: |
1220 | return dbg << "Format_IMC3" ; |
1221 | case QVideoFrame::Format_IMC4: |
1222 | return dbg << "Format_IMC4" ; |
1223 | case QVideoFrame::Format_Y8: |
1224 | return dbg << "Format_Y8" ; |
1225 | case QVideoFrame::Format_Y16: |
1226 | return dbg << "Format_Y16" ; |
1227 | case QVideoFrame::Format_Jpeg: |
1228 | return dbg << "Format_Jpeg" ; |
1229 | case QVideoFrame::Format_AdobeDng: |
1230 | return dbg << "Format_AdobeDng" ; |
1231 | case QVideoFrame::Format_CameraRaw: |
1232 | return dbg << "Format_CameraRaw" ; |
1233 | |
1234 | default: |
1235 | return dbg << QString(QLatin1String("UserType(%1)")).arg(int(pf)).toLatin1().constData(); |
1236 | } |
1237 | } |
1238 | |
1239 | QDebug operator<<(QDebug dbg, QVideoFrame::FieldType f) |
1240 | { |
1241 | QDebugStateSaver saver(dbg); |
1242 | dbg.nospace(); |
1243 | switch (f) { |
1244 | case QVideoFrame::TopField: |
1245 | return dbg << "TopField" ; |
1246 | case QVideoFrame::BottomField: |
1247 | return dbg << "BottomField" ; |
1248 | case QVideoFrame::InterlacedFrame: |
1249 | return dbg << "InterlacedFrame" ; |
1250 | default: |
1251 | return dbg << "ProgressiveFrame" ; |
1252 | } |
1253 | } |
1254 | |
1255 | static QString qFormatTimeStamps(qint64 start, qint64 end) |
1256 | { |
1257 | // Early out for invalid. |
1258 | if (start < 0) |
1259 | return QLatin1String("[no timestamp]" ); |
1260 | |
1261 | bool onlyOne = (start == end); |
1262 | |
1263 | // [hh:]mm:ss.ms |
1264 | const int s_millis = start % 1000000; |
1265 | start /= 1000000; |
1266 | const int s_seconds = start % 60; |
1267 | start /= 60; |
1268 | const int s_minutes = start % 60; |
1269 | start /= 60; |
1270 | |
1271 | if (onlyOne) { |
1272 | if (start > 0) |
1273 | return QString::fromLatin1("@%1:%2:%3.%4") |
1274 | .arg(start, 1, 10, QLatin1Char('0')) |
1275 | .arg(s_minutes, 2, 10, QLatin1Char('0')) |
1276 | .arg(s_seconds, 2, 10, QLatin1Char('0')) |
1277 | .arg(s_millis, 2, 10, QLatin1Char('0')); |
1278 | else |
1279 | return QString::fromLatin1("@%1:%2.%3") |
1280 | .arg(s_minutes, 2, 10, QLatin1Char('0')) |
1281 | .arg(s_seconds, 2, 10, QLatin1Char('0')) |
1282 | .arg(s_millis, 2, 10, QLatin1Char('0')); |
1283 | } else if (end == -1) { |
1284 | // An end time of -1 means there is no defined end; keep displaying the frame indefinitely. |
1285 | if (start > 0) |
1286 | return QString::fromLatin1("%1:%2:%3.%4 - forever") |
1287 | .arg(start, 1, 10, QLatin1Char('0')) |
1288 | .arg(s_minutes, 2, 10, QLatin1Char('0')) |
1289 | .arg(s_seconds, 2, 10, QLatin1Char('0')) |
1290 | .arg(s_millis, 2, 10, QLatin1Char('0')) |
1291 | else |
1292 | return QString::fromLatin1("%1:%2.%3 - forever") |
1293 | .arg(s_minutes, 2, 10, QLatin1Char('0')) |
1294 | .arg(s_seconds, 2, 10, QLatin1Char('0')) |
1295 | .arg(s_millis, 2, 10, QLatin1Char('0')); |
1296 | } else { |
1297 | const int e_millis = end % 1000000; |
1298 | end /= 1000000; |
1299 | const int e_seconds = end % 60; |
1300 | end /= 60; |
1301 | const int e_minutes = end % 60; |
1302 | end /= 60; |
1303 | |
1304 | if (start > 0 || end > 0) |
1305 | return QString::fromLatin1("%1:%2:%3.%4 - %5:%6:%7.%8") |
1306 | .arg(start, 1, 10, QLatin1Char('0')) |
1307 | .arg(s_minutes, 2, 10, QLatin1Char('0')) |
1308 | .arg(s_seconds, 2, 10, QLatin1Char('0')) |
1309 | .arg(s_millis, 2, 10, QLatin1Char('0')) |
1310 | .arg(end, 1, 10, QLatin1Char('0')) |
1311 | .arg(e_minutes, 2, 10, QLatin1Char('0')) |
1312 | .arg(e_seconds, 2, 10, QLatin1Char('0')) |
1313 | .arg(e_millis, 2, 10, QLatin1Char('0')); |
1314 | else |
1315 | return QString::fromLatin1("%1:%2.%3 - %4:%5.%6") |
1316 | .arg(s_minutes, 2, 10, QLatin1Char('0')) |
1317 | .arg(s_seconds, 2, 10, QLatin1Char('0')) |
1318 | .arg(s_millis, 2, 10, QLatin1Char('0')) |
1319 | .arg(e_minutes, 2, 10, QLatin1Char('0')) |
1320 | .arg(e_seconds, 2, 10, QLatin1Char('0')) |
1321 | .arg(e_millis, 2, 10, QLatin1Char('0')); |
1322 | } |
1323 | } |
1324 | |
1325 | QDebug operator<<(QDebug dbg, const QVideoFrame& f) |
1326 | { |
1327 | QDebugStateSaver saver(dbg); |
1328 | dbg.nospace(); |
1329 | dbg << "QVideoFrame(" << f.size() << ", " |
1330 | << f.pixelFormat() << ", " |
1331 | << f.handleType() << ", " |
1332 | << f.mapMode() << ", " |
1333 | << qFormatTimeStamps(f.startTime(), f.endTime()).toLatin1().constData(); |
1334 | if (f.availableMetaData().count()) |
1335 | dbg << ", metaData: " << f.availableMetaData(); |
1336 | dbg << ')'; |
1337 | return dbg; |
1338 | } |
1339 | #endif |
1340 | |
1341 | QT_END_NAMESPACE |
1342 | |
1343 | |