/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include "qvideosurfaceformat.h"

#include <qdebug.h>
#include <qmetatype.h>
#include <qpair.h>
#include <qvariant.h>
#include <qvector.h>

QT_BEGIN_NAMESPACE

static void qRegisterVideoSurfaceFormatMetaTypes()
{
    qRegisterMetaType<QVideoSurfaceFormat>();
    qRegisterMetaType<QVideoSurfaceFormat::Direction>();
    qRegisterMetaType<QVideoSurfaceFormat::YCbCrColorSpace>();
}

Q_CONSTRUCTOR_FUNCTION(qRegisterVideoSurfaceFormatMetaTypes)


class QVideoSurfaceFormatPrivate : public QSharedData
{
public:
    QVideoSurfaceFormatPrivate()
        : pixelFormat(QVideoFrame::Format_Invalid)
        , handleType(QAbstractVideoBuffer::NoHandle)
        , scanLineDirection(QVideoSurfaceFormat::TopToBottom)
        , pixelAspectRatio(1, 1)
        , ycbcrColorSpace(QVideoSurfaceFormat::YCbCr_Undefined)
        , frameRate(0.0)
        , mirrored(false)
    {
    }

    QVideoSurfaceFormatPrivate(
            const QSize &size,
            QVideoFrame::PixelFormat format,
            QAbstractVideoBuffer::HandleType type)
        : pixelFormat(format)
        , handleType(type)
        , scanLineDirection(QVideoSurfaceFormat::TopToBottom)
        , frameSize(size)
        , pixelAspectRatio(1, 1)
        , ycbcrColorSpace(QVideoSurfaceFormat::YCbCr_Undefined)
        , viewport(QPoint(0, 0), size)
        , frameRate(0.0)
        , mirrored(false)
    {
    }

    QVideoSurfaceFormatPrivate(const QVideoSurfaceFormatPrivate &other)
        : QSharedData(other)
        , pixelFormat(other.pixelFormat)
        , handleType(other.handleType)
        , scanLineDirection(other.scanLineDirection)
        , frameSize(other.frameSize)
        , pixelAspectRatio(other.pixelAspectRatio)
        , ycbcrColorSpace(other.ycbcrColorSpace)
        , viewport(other.viewport)
        , frameRate(other.frameRate)
        , mirrored(other.mirrored)
        , propertyNames(other.propertyNames)
        , propertyValues(other.propertyValues)
    {
    }

    bool operator ==(const QVideoSurfaceFormatPrivate &other) const
    {
        if (pixelFormat == other.pixelFormat
                && handleType == other.handleType
                && scanLineDirection == other.scanLineDirection
                && frameSize == other.frameSize
                && pixelAspectRatio == other.pixelAspectRatio
                && viewport == other.viewport
                && frameRatesEqual(frameRate, other.frameRate)
                && ycbcrColorSpace == other.ycbcrColorSpace
                && mirrored == other.mirrored
                && propertyNames.count() == other.propertyNames.count()) {
            for (int i = 0; i < propertyNames.count(); ++i) {
                int j = other.propertyNames.indexOf(propertyNames.at(i));

                if (j == -1 || propertyValues.at(i) != other.propertyValues.at(j))
                    return false;
            }
            return true;
        } else {
            return false;
        }
    }

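    // Frame rates are compared with a small relative tolerance rather than for exact
    // equality, so that rates derived from slightly different rational approximations
    // (for example 30000/1001) still compare equal.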
    inline static bool frameRatesEqual(qreal r1, qreal r2)
    {
        return qAbs(r1 - r2) <= 0.00001 * qMin(qAbs(r1), qAbs(r2));
    }

    QVideoFrame::PixelFormat pixelFormat;
    QAbstractVideoBuffer::HandleType handleType;
    QVideoSurfaceFormat::Direction scanLineDirection;
    QSize frameSize;
    QSize pixelAspectRatio;
    QVideoSurfaceFormat::YCbCrColorSpace ycbcrColorSpace;
    QRect viewport;
    qreal frameRate;
    bool mirrored;
    QList<QByteArray> propertyNames;
    QList<QVariant> propertyValues;
};

/*!
    \class QVideoSurfaceFormat
    \brief The QVideoSurfaceFormat class specifies the stream format of a video presentation
    surface.
    \inmodule QtMultimedia

    \ingroup multimedia
    \ingroup multimedia_video

    A video surface presents a stream of video frames. The surface's format describes the type of
    the frames and determines how they should be presented.

    The core properties of a video stream required to set up a video surface are the pixel format
    given by pixelFormat(), and the frame dimensions given by frameSize().

    If the surface is to present frames using a frame's handle, a surface format will also include
    a handle type, which is given by the handleType() function.

    The region of a frame that is actually displayed on a video surface is given by the viewport().
    A stream may have a viewport smaller than the entire region of a frame to allow for videos
    smaller than the nearest optimal size of a video frame. For example, the width of a frame may
    be extended so that the start of each scan line is eight-byte aligned.

    Other common properties are the pixelAspectRatio(), scanLineDirection(), and frameRate().
    Additionally, a stream may have some type-specific properties, which are listed by the
    propertyNames() function and can be accessed using the property() and setProperty() functions.
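
    For example, a surface implementation might describe a simple RGB stream and query its core
    properties like this (a minimal sketch; the chosen size and pixel format are only
    illustrative):

    \code
    QVideoSurfaceFormat format(QSize(640, 480), QVideoFrame::Format_RGB32);

    if (format.isValid()) {
        QVideoFrame::PixelFormat pixelFormat = format.pixelFormat();
        QSize frameSize = format.frameSize();
        QRect viewport = format.viewport();
        // Configure the presentation surface for pixelFormat, frameSize and viewport...
    }
    \endcode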
*/

/*!
    \enum QVideoSurfaceFormat::Direction

    Enumerates the layout direction of video scan lines.

    \value TopToBottom Scan lines are arranged from the top of the frame to the bottom.
    \value BottomToTop Scan lines are arranged from the bottom of the frame to the top.
*/

/*!
    \enum QVideoSurfaceFormat::YCbCrColorSpace

    Enumerates the Y'CbCr color space of video frames.

    \value YCbCr_Undefined
    No color space is specified.

    \value YCbCr_BT601
    A Y'CbCr color space defined by ITU-R recommendation BT.601
    with Y value range from 16 to 235, and Cb/Cr range from 16 to 240.
    Used in standard definition video.

    \value YCbCr_BT709
    A Y'CbCr color space defined by ITU-R BT.709 with the same value range as YCbCr_BT601. Used
    for HDTV.

    \value YCbCr_xvYCC601
    The BT.601 color space with the value range extended to 0 to 255.
    It is backward compatible with BT.601 and uses values outside the BT.601 range to represent a
    wider range of colors.

    \value YCbCr_xvYCC709
    The BT.709 color space with the value range extended to 0 to 255.

    \value YCbCr_JPEG
    The full range Y'CbCr color space used in JPEG files.
*/

/*!
    Constructs a null video stream format.
*/
QVideoSurfaceFormat::QVideoSurfaceFormat()
    : d(new QVideoSurfaceFormatPrivate)
{
}

/*!
    Constructs a description of a video stream that receives \a type buffers with the given frame
    \a size and pixel \a format.
*/
QVideoSurfaceFormat::QVideoSurfaceFormat(
        const QSize& size, QVideoFrame::PixelFormat format, QAbstractVideoBuffer::HandleType type)
    : d(new QVideoSurfaceFormatPrivate(size, format, type))
{
}

/*!
    Constructs a copy of \a other.
*/
QVideoSurfaceFormat::QVideoSurfaceFormat(const QVideoSurfaceFormat &other)
    : d(other.d)
{
}

/*!
    Assigns the values of \a other to this object.
*/
QVideoSurfaceFormat &QVideoSurfaceFormat::operator =(const QVideoSurfaceFormat &other)
{
    d = other.d;

    return *this;
}

/*!
    Destroys a video stream description.
*/
QVideoSurfaceFormat::~QVideoSurfaceFormat()
{
}

/*!
    Identifies if a video surface format has a valid pixel format and frame size.

    Returns true if the format is valid, and false otherwise.
*/
bool QVideoSurfaceFormat::isValid() const
{
    return d->pixelFormat != QVideoFrame::Format_Invalid && d->frameSize.isValid();
}

/*!
    Returns true if \a other is the same as this video format, and false if they are different.
*/
bool QVideoSurfaceFormat::operator ==(const QVideoSurfaceFormat &other) const
{
    return d == other.d || *d == *other.d;
}

/*!
    Returns true if \a other is different to this video format, and false if they are the same.
*/
bool QVideoSurfaceFormat::operator !=(const QVideoSurfaceFormat &other) const
{
    return d != other.d && !(*d == *other.d);
}

/*!
    Returns the pixel format of frames in a video stream.
*/
QVideoFrame::PixelFormat QVideoSurfaceFormat::pixelFormat() const
{
    return d->pixelFormat;
}

/*!
    Returns the type of handle the surface uses to present the frame data.

    If the handle type is \c QAbstractVideoBuffer::NoHandle, buffers with any handle type are valid
    provided they can be \l {QAbstractVideoBuffer::map()}{mapped} with the
    QAbstractVideoBuffer::ReadOnly flag. If the handleType() is not QAbstractVideoBuffer::NoHandle
    then the handle type of the buffer must be the same as that of the surface format.
*/
QAbstractVideoBuffer::HandleType QVideoSurfaceFormat::handleType() const
{
    return d->handleType;
}

/*!
    Returns the dimensions of frames in a video stream.

    \sa frameWidth(), frameHeight()
*/
QSize QVideoSurfaceFormat::frameSize() const
{
    return d->frameSize;
}

/*!
    Returns the width of frames in a video stream.

    \sa frameSize(), frameHeight()
*/
int QVideoSurfaceFormat::frameWidth() const
{
    return d->frameSize.width();
}

/*!
    Returns the height of frames in a video stream.
*/
int QVideoSurfaceFormat::frameHeight() const
{
    return d->frameSize.height();
}

/*!
    Sets the size of frames in a video stream to \a size.

    This will reset the viewport() to fill the entire frame.
*/
void QVideoSurfaceFormat::setFrameSize(const QSize &size)
{
    d->frameSize = size;
    d->viewport = QRect(QPoint(0, 0), size);
}

/*!
    \overload

    Sets the \a width and \a height of frames in a video stream.

    This will reset the viewport() to fill the entire frame.
*/
void QVideoSurfaceFormat::setFrameSize(int width, int height)
{
    d->frameSize = QSize(width, height);
    d->viewport = QRect(0, 0, width, height);
}

/*!
    Returns the viewport of a video stream.

    The viewport is the region of a video frame that is actually displayed.

    By default the viewport covers an entire frame.
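
    For example, a stream whose scan lines are padded to a 32-pixel boundary could be described
    like this (a minimal sketch; the sizes are only illustrative):

    \code
    // The decoder pads each scan line to 1952 pixels, but only 1920 are visible.
    QVideoSurfaceFormat format(QSize(1952, 1080), QVideoFrame::Format_YUV420P);
    format.setViewport(QRect(0, 0, 1920, 1080));
    \endcode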
*/
QRect QVideoSurfaceFormat::viewport() const
{
    return d->viewport;
}

/*!
    Sets the viewport of a video stream to \a viewport.
*/
void QVideoSurfaceFormat::setViewport(const QRect &viewport)
{
    d->viewport = viewport;
}

/*!
    Returns the direction of scan lines.
*/
QVideoSurfaceFormat::Direction QVideoSurfaceFormat::scanLineDirection() const
{
    return d->scanLineDirection;
}

/*!
    Sets the \a direction of scan lines.
*/
void QVideoSurfaceFormat::setScanLineDirection(Direction direction)
{
    d->scanLineDirection = direction;
}

/*!
    Returns the frame rate of a video stream in frames per second.
*/
qreal QVideoSurfaceFormat::frameRate() const
{
    return d->frameRate;
}

/*!
    Sets the frame \a rate of a video stream in frames per second.
*/
void QVideoSurfaceFormat::setFrameRate(qreal rate)
{
    d->frameRate = rate;
}

/*!
    Returns a video stream's pixel aspect ratio.
*/
QSize QVideoSurfaceFormat::pixelAspectRatio() const
{
    return d->pixelAspectRatio;
}

/*!
    Sets a video stream's pixel aspect \a ratio.
*/
void QVideoSurfaceFormat::setPixelAspectRatio(const QSize &ratio)
{
    d->pixelAspectRatio = ratio;
}

/*!
    \overload

    Sets the \a horizontal and \a vertical elements of a video stream's pixel aspect ratio.
*/
void QVideoSurfaceFormat::setPixelAspectRatio(int horizontal, int vertical)
{
    d->pixelAspectRatio = QSize(horizontal, vertical);
}

/*!
    Returns the Y'CbCr color space of a video stream.
*/
QVideoSurfaceFormat::YCbCrColorSpace QVideoSurfaceFormat::yCbCrColorSpace() const
{
    return d->ycbcrColorSpace;
}

/*!
    Sets the Y'CbCr color \a space of a video stream.
    It is only used with raw YUV frame types.
*/
void QVideoSurfaceFormat::setYCbCrColorSpace(QVideoSurfaceFormat::YCbCrColorSpace space)
{
    d->ycbcrColorSpace = space;
}

/*!
    Returns \c true if the surface is mirrored around its vertical axis.
    This is typically needed for video frames coming from a front camera of a mobile device.

    \note The mirroring here differs from QImage::mirrored, as a vertically mirrored QImage
    will be mirrored around its x-axis.

    \since 5.11
*/
bool QVideoSurfaceFormat::isMirrored() const
{
    return d->mirrored;
}

/*!
    Sets whether the surface is \a mirrored around its vertical axis.
    This is typically needed for video frames coming from a front camera of a mobile device.
    The default value is false.

    \note The mirroring here differs from QImage::mirrored, as a vertically mirrored QImage
    will be mirrored around its x-axis.

    \since 5.11
*/
void QVideoSurfaceFormat::setMirrored(bool mirrored)
{
    d->mirrored = mirrored;
}

/*!
    Returns a suggested size in pixels for the video stream.

    This is the size of the viewport scaled according to the pixel aspect ratio.
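
    For example, a 720x576 viewport with a pixel aspect ratio of 64:45 gives a size hint of
    1024x576 (720 * 64 / 45 = 1024).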
*/
QSize QVideoSurfaceFormat::sizeHint() const
{
    QSize size = d->viewport.size();

    if (d->pixelAspectRatio.height() != 0)
        size.setWidth(size.width() * d->pixelAspectRatio.width() / d->pixelAspectRatio.height());

    return size;
}

/*!
    Returns a list of video format dynamic property names.
*/
QList<QByteArray> QVideoSurfaceFormat::propertyNames() const
{
    return (QList<QByteArray>()
            << "handleType"
            << "pixelFormat"
            << "frameSize"
            << "frameWidth"
            << "viewport"
            << "scanLineDirection"
            << "frameRate"
            << "pixelAspectRatio"
            << "sizeHint"
            << "yCbCrColorSpace"
            << "mirrored")
            + d->propertyNames;
}

/*!
    Returns the value of the video format's \a name property.
*/
QVariant QVideoSurfaceFormat::property(const char *name) const
{
    if (qstrcmp(name, "handleType") == 0) {
        return QVariant::fromValue(d->handleType);
    } else if (qstrcmp(name, "pixelFormat") == 0) {
        return QVariant::fromValue(d->pixelFormat);
    } else if (qstrcmp(name, "frameSize") == 0) {
        return d->frameSize;
    } else if (qstrcmp(name, "frameWidth") == 0) {
        return d->frameSize.width();
    } else if (qstrcmp(name, "frameHeight") == 0) {
        return d->frameSize.height();
    } else if (qstrcmp(name, "viewport") == 0) {
        return d->viewport;
    } else if (qstrcmp(name, "scanLineDirection") == 0) {
        return QVariant::fromValue(d->scanLineDirection);
    } else if (qstrcmp(name, "frameRate") == 0) {
        return QVariant::fromValue(d->frameRate);
    } else if (qstrcmp(name, "pixelAspectRatio") == 0) {
        return QVariant::fromValue(d->pixelAspectRatio);
    } else if (qstrcmp(name, "sizeHint") == 0) {
        return sizeHint();
    } else if (qstrcmp(name, "yCbCrColorSpace") == 0) {
        return QVariant::fromValue(d->ycbcrColorSpace);
    } else if (qstrcmp(name, "mirrored") == 0) {
        return d->mirrored;
    } else {
        int id = 0;
        for (; id < d->propertyNames.count() && d->propertyNames.at(id) != name; ++id) {}

        return id < d->propertyValues.count()
                ? d->propertyValues.at(id)
                : QVariant();
    }
}

/*!
    Sets the video format's \a name property to \a value.

    Attempts to set a read-only property are ignored.
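
    Names that do not match one of the built-in properties are stored as dynamic properties.
    For example, a backend could attach and later retrieve stream metadata of its own
    (a minimal sketch; the property name "customColorRange" is only a placeholder):

    \code
    QVideoSurfaceFormat format(QSize(1280, 720), QVideoFrame::Format_NV12);

    // Stored as a dynamic property because the name is not one of the built-in properties.
    format.setProperty("customColorRange", QStringLiteral("full"));

    // Built-in and dynamic properties are read back the same way.
    QVariant range = format.property("customColorRange");
    QVariant size = format.property("frameSize");
    \endcode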
*/
void QVideoSurfaceFormat::setProperty(const char *name, const QVariant &value)
{
    if (qstrcmp(name, "handleType") == 0) {
        // read only.
    } else if (qstrcmp(name, "pixelFormat") == 0) {
        // read only.
    } else if (qstrcmp(name, "frameSize") == 0) {
        if (value.canConvert<QSize>()) {
            d->frameSize = qvariant_cast<QSize>(value);
            d->viewport = QRect(QPoint(0, 0), d->frameSize);
        }
    } else if (qstrcmp(name, "frameWidth") == 0) {
        // read only.
    } else if (qstrcmp(name, "frameHeight") == 0) {
        // read only.
    } else if (qstrcmp(name, "viewport") == 0) {
        if (value.canConvert<QRect>())
            d->viewport = qvariant_cast<QRect>(value);
    } else if (qstrcmp(name, "scanLineDirection") == 0) {
        if (value.canConvert<Direction>())
            d->scanLineDirection = qvariant_cast<Direction>(value);
    } else if (qstrcmp(name, "frameRate") == 0) {
        if (value.canConvert<qreal>())
            d->frameRate = qvariant_cast<qreal>(value);
    } else if (qstrcmp(name, "pixelAspectRatio") == 0) {
        if (value.canConvert<QSize>())
            d->pixelAspectRatio = qvariant_cast<QSize>(value);
    } else if (qstrcmp(name, "sizeHint") == 0) {
        // read only.
    } else if (qstrcmp(name, "yCbCrColorSpace") == 0) {
        if (value.canConvert<YCbCrColorSpace>())
            d->ycbcrColorSpace = qvariant_cast<YCbCrColorSpace>(value);
    } else if (qstrcmp(name, "mirrored") == 0) {
        if (value.canConvert<bool>())
            d->mirrored = qvariant_cast<bool>(value);
    } else {
        int id = 0;
        for (; id < d->propertyNames.count() && d->propertyNames.at(id) != name; ++id) {}

        if (id < d->propertyValues.count()) {
            if (value.isNull()) {
                d->propertyNames.removeAt(id);
                d->propertyValues.removeAt(id);
            } else {
                d->propertyValues[id] = value;
            }
        } else if (!value.isNull()) {
            d->propertyNames.append(QByteArray(name));
            d->propertyValues.append(value);
        }
    }
}


#ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::YCbCrColorSpace cs)
{
    QDebugStateSaver saver(dbg);
    dbg.nospace();
    switch (cs) {
    case QVideoSurfaceFormat::YCbCr_BT601:
        dbg << "YCbCr_BT601";
        break;
    case QVideoSurfaceFormat::YCbCr_BT709:
        dbg << "YCbCr_BT709";
        break;
    case QVideoSurfaceFormat::YCbCr_JPEG:
        dbg << "YCbCr_JPEG";
        break;
    case QVideoSurfaceFormat::YCbCr_xvYCC601:
        dbg << "YCbCr_xvYCC601";
        break;
    case QVideoSurfaceFormat::YCbCr_xvYCC709:
        dbg << "YCbCr_xvYCC709";
        break;
    case QVideoSurfaceFormat::YCbCr_CustomMatrix:
        dbg << "YCbCr_CustomMatrix";
        break;
    default:
        dbg << "YCbCr_Undefined";
        break;
    }
    return dbg;
}

QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::Direction dir)
{
    QDebugStateSaver saver(dbg);
    dbg.nospace();
    switch (dir) {
    case QVideoSurfaceFormat::BottomToTop:
        dbg << "BottomToTop";
        break;
    case QVideoSurfaceFormat::TopToBottom:
        dbg << "TopToBottom";
        break;
    }
    return dbg;
}

QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f)
{
    QDebugStateSaver saver(dbg);
    dbg.nospace();
    dbg << "QVideoSurfaceFormat(" << f.pixelFormat() << ", " << f.frameSize()
        << ", viewport=" << f.viewport() << ", pixelAspectRatio=" << f.pixelAspectRatio()
        << ", handleType=" << f.handleType() << ", yCbCrColorSpace=" << f.yCbCrColorSpace()
        << ')';

    const auto propertyNames = f.propertyNames();
    for (const QByteArray& propertyName : propertyNames)
        dbg << "\n " << propertyName.data() << " = " << f.property(propertyName.data());

    return dbg;
}
#endif

QT_END_NAMESPACE