// Copyright (C) 2021 The Qt Company Ltd.
// Copyright (C) 2016 Research In Motion
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qquickvideooutput_p.h"

#include <private/qvideooutputorientationhandler_p.h>
#include <QtMultimedia/qmediaplayer.h>
#include <QtMultimedia/qmediacapturesession.h>
#include <private/qfactoryloader_p.h>
#include <QtCore/qloggingcategory.h>
#include <QtQuick/QQuickWindow>
#include <private/qquickwindow_p.h>
#include <private/qmultimediautils_p.h>
#include <qsgvideonode_p.h>
#include <QtCore/qrunnable.h>

QT_BEGIN_NAMESPACE

static Q_LOGGING_CATEGORY(qLcVideo, "qt.multimedia.video")

namespace {

inline bool qIsDefaultAspect(int o)
{
    return (o % 180) == 0;
}

inline bool qIsDefaultAspect(QtVideo::Rotation rotation)
{
    return qIsDefaultAspect(qToUnderlying(rotation));
}

/*
 * Return the orientation normalized to 0-359
 */
inline int qNormalizedOrientation(int o)
{
    // Negative orientations give negative results
    int o2 = o % 360;
    if (o2 < 0)
        o2 += 360;
    return o2;
}

}

/*!
    \qmltype VideoOutput
    //! \nativetype QQuickVideoOutput
    \brief Render video or camera viewfinder.

    \ingroup multimedia_qml
    \ingroup multimedia_video_qml
    \inqmlmodule QtMultimedia

    \qml

    Rectangle {
        width: 800
        height: 600
        color: "black"

        MediaPlayer {
            id: player
            source: "file://video.webm"
            videoOutput: videoOutput
        }

        VideoOutput {
            id: videoOutput
            anchors.fill: parent
        }
    }

    \endqml

    The VideoOutput item supports untransformed, stretched, and uniformly scaled video presentation.
    For a description of stretched uniformly scaled presentation, see the \l fillMode property
    description.

    \sa MediaPlayer, Camera

    \omit
    \section1 Screen Saver

    If it is likely that an application will be playing video for an extended
    period of time without user interaction it may be necessary to disable
    the platform's screen saver. The \l ScreenSaver (from \l QtSystemInfo)
    may be used to disable the screensaver in this fashion:

    \qml
    import QtSystemInfo

    ScreenSaver { screenSaverEnabled: false }
    \endqml
    \endomit
*/

// TODO: Restore Qt System Info docs when the module is released

/*!
    \internal
    \class QQuickVideoOutput
    \brief The QQuickVideoOutput class provides a video output item.
*/

QQuickVideoOutput::QQuickVideoOutput(QQuickItem *parent) :
    QQuickItem(parent)
{
    setFlag(ItemHasContents, true);

    m_sink = new QQuickVideoSink(this);
    qRegisterMetaType<QVideoFrameFormat>();
    connect(m_sink, &QVideoSink::videoFrameChanged, this,
            [this](const QVideoFrame &frame) {
                setFrame(frame);
                QMetaObject::invokeMethod(this, &QQuickVideoOutput::_q_newFrame, frame.size());
            },
            Qt::DirectConnection);

    initRhiForSink();
}

QQuickVideoOutput::~QQuickVideoOutput()
{
}

/*!
    \qmlproperty object QtMultimedia::VideoOutput::videoSink

    This property holds the underlying C++ QVideoSink object that is used
    to render the video frames to this VideoOutput element.

    Normal usage of VideoOutput from QML should not require using this property.
*/

QVideoSink *QQuickVideoOutput::videoSink() const
{
    return m_sink;
}

/*!
    \qmlproperty enumeration QtMultimedia::VideoOutput::fillMode

    Set this property to define how the video is scaled to fit the target area.

    \list
    \li Stretch - the video is scaled to fit.
    \li PreserveAspectFit - the video is scaled uniformly to fit without cropping
    \li PreserveAspectCrop - the video is scaled uniformly to fill, cropping if necessary
    \endlist

    The default fill mode is PreserveAspectFit.
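
    For example, the following fills the entire item and crops any overflow
    (a minimal usage sketch, not taken from this file; the surrounding layout
    is assumed):

    \qml
    VideoOutput {
        anchors.fill: parent
        fillMode: VideoOutput.PreserveAspectCrop
    }
    \endqml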
*/

QQuickVideoOutput::FillMode QQuickVideoOutput::fillMode() const
{
    return FillMode(m_aspectRatioMode);
}

void QQuickVideoOutput::setFillMode(FillMode mode)
{
    if (mode == fillMode())
        return;

    m_aspectRatioMode = Qt::AspectRatioMode(mode);

    m_geometryDirty = true;
    update();

    emit fillModeChanged(mode);
}

void QQuickVideoOutput::_q_newFrame(QSize size)
{
    update();

    size = qRotatedFrameSize(size, m_frameDisplayingRotation);

    if (m_nativeSize != size) {
        m_nativeSize = size;

        m_geometryDirty = true;

        setImplicitWidth(size.width());
        setImplicitHeight(size.height());

        emit sourceRectChanged();
    }
}

/* Based on fill mode and our size, figure out the source/dest rects */
void QQuickVideoOutput::_q_updateGeometry()
{
    const QRectF rect(0, 0, width(), height());
    const QRectF absoluteRect(x(), y(), width(), height());

    if (!m_geometryDirty && m_lastRect == absoluteRect)
        return;

    QRectF oldContentRect(m_contentRect);

    m_geometryDirty = false;
    m_lastRect = absoluteRect;

    const auto fill = m_aspectRatioMode;
    if (m_nativeSize.isEmpty()) {
        // This is necessary for the item to receive the
        // first paint event and configure the video surface.
        m_contentRect = rect;
    } else if (fill == Qt::IgnoreAspectRatio) {
        m_contentRect = rect;
    } else {
        QSizeF scaled = m_nativeSize;
        scaled.scale(rect.size(), fill);

        m_contentRect = QRectF(QPointF(), scaled);
        m_contentRect.moveCenter(rect.center());
    }

    updateGeometry();

    if (m_contentRect != oldContentRect)
        emit contentRectChanged();
}

/*!
    \qmlproperty int QtMultimedia::VideoOutput::orientation

    In some cases the source video stream requires a certain
    orientation to be correct. This includes
    sources like a camera viewfinder, where the displayed
    viewfinder should match reality, no matter what rotation
    the rest of the user interface has.

    This property allows you to apply a rotation (in steps
    of 90 degrees) to compensate for any user interface
    rotation, with positive values in the anti-clockwise direction.

    The orientation change will also affect the mapping
    of coordinates from source to viewport.
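
    For example, applying a 90 degree anti-clockwise rotation to the rendered
    video (a minimal sketch; the surrounding item hierarchy is assumed):

    \qml
    VideoOutput {
        anchors.fill: parent
        orientation: 90
    }
    \endqml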
*/
int QQuickVideoOutput::orientation() const
{
    return m_orientation;
}

void QQuickVideoOutput::setOrientation(int orientation)
{
    // Make sure it's a multiple of 90.
    if (orientation % 90)
        return;

    // If there's no actual change, return
    if (m_orientation == orientation)
        return;

    // If the new orientation has the same effect
    // as the old one, don't update the video node stuff
    if ((m_orientation % 360) == (orientation % 360)) {
        m_orientation = orientation;
        emit orientationChanged();
        return;
    }

    m_geometryDirty = true;

    // Otherwise, a new orientation
    // See if we need to change aspect ratio orientation too
    bool oldAspect = qIsDefaultAspect(m_orientation);
    bool newAspect = qIsDefaultAspect(orientation);

    m_orientation = orientation;

    {
        QMutexLocker lock(&m_frameMutex);
        m_frameDisplayingRotation = qNormalizedFrameTransformation(m_frame, m_orientation).rotation;
    }

    if (oldAspect != newAspect) {
        m_nativeSize.transpose();

        setImplicitWidth(m_nativeSize.width());
        setImplicitHeight(m_nativeSize.height());

        // Source rectangle does not change for orientation
    }

    update();
    emit orientationChanged();
}

/*!
    \qmlproperty rectangle QtMultimedia::VideoOutput::contentRect

    This property holds the item coordinates of the area that
    would contain video to render. With certain fill modes,
    this rectangle will be larger than the visible area of the
    \c VideoOutput.

    This property is useful when other coordinates are specified
    in terms of the source dimensions - this applies to relative
    (normalized) frame coordinates in the range of 0 to 1.0.

    Areas outside this rectangle will be transparent.
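
    For example, a normalized frame coordinate can be mapped to item
    coordinates through this rectangle (a sketch; the id \c videoOutput and
    the normalized values \c nx and \c ny are assumptions):

    \qml
    Item {
        // Assumed normalized frame coordinates in the range 0 to 1.0.
        readonly property real nx: 0.5
        readonly property real ny: 0.5
        // Corresponding position in item coordinates.
        readonly property point mapped: Qt.point(
            videoOutput.contentRect.x + nx * videoOutput.contentRect.width,
            videoOutput.contentRect.y + ny * videoOutput.contentRect.height)
    }
    \endqml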
*/
QRectF QQuickVideoOutput::contentRect() const
{
    return m_contentRect;
}

/*!
    \qmlproperty rectangle QtMultimedia::VideoOutput::sourceRect

    This property holds the area of the source video
    content that is considered for rendering. The
    values are in source pixel coordinates, adjusted for
    the source's pixel aspect ratio.

    Note that typically the top left corner of this rectangle
    will be \c {0,0} while the width and height will be the
    width and height of the input content. These values will
    only differ when the video source has a viewport set.

    The orientation setting does not affect this rectangle.
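
    For example, the source resolution can be displayed alongside the video
    (a usage sketch; \c videoOutput refers to a VideoOutput declared elsewhere):

    \qml
    Text {
        text: videoOutput.sourceRect.width + " x " + videoOutput.sourceRect.height
    }
    \endqml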

    \sa QVideoFrameFormat::viewport()
*/
QRectF QQuickVideoOutput::sourceRect() const
{
    // We might have to transpose back
    QSizeF size = m_nativeSize;
    if (!size.isValid())
        return {};

    if (!qIsDefaultAspect(m_frameDisplayingRotation))
        size.transpose();

    // Take the viewport into account for the top left position.
    // m_nativeSize is already adjusted to the viewport, as it originates
    // from QVideoFrameFormat::viewport(), which includes pixel aspect ratio
    const QRectF viewport = adjustedViewport();
    Q_ASSERT(viewport.size() == size);
    return QRectF(viewport.topLeft(), size);
}

void QQuickVideoOutput::geometryChange(const QRectF &newGeometry, const QRectF &oldGeometry)
{
    Q_UNUSED(newGeometry);
    Q_UNUSED(oldGeometry);

    QQuickItem::geometryChange(newGeometry, oldGeometry);

    // Explicitly listen to geometry changes here. This is needed since changing the position does
    // not trigger a call to updatePaintNode().
    // We need to react to position changes though, as the window backend's display rect gets
    // changed in that situation.
    _q_updateGeometry();
}

void QQuickVideoOutput::_q_invalidateSceneGraph()
{
    invalidateSceneGraph();
}

void QQuickVideoOutput::_q_sceneGraphInitialized()
{
    initRhiForSink();
}

void QQuickVideoOutput::releaseResources()
{
    // Called on the gui thread when the window is closed or changed.
    invalidateSceneGraph();
}

void QQuickVideoOutput::invalidateSceneGraph()
{
    // Called on the render thread, e.g. when the context is lost.
    // QMutexLocker lock(&m_frameMutex);
    initRhiForSink();
}

void QQuickVideoOutput::initRhiForSink()
{
    QRhi *rhi = m_window ? QQuickWindowPrivate::get(m_window)->rhi : nullptr;
    m_sink->setRhi(rhi);
}

void QQuickVideoOutput::itemChange(QQuickItem::ItemChange change,
                                   const QQuickItem::ItemChangeData &changeData)
{
    if (change != QQuickItem::ItemSceneChange)
        return;

    if (changeData.window == m_window)
        return;
    if (m_window)
        disconnect(m_window);
    m_window = changeData.window;

    if (m_window) {
        // We want to receive the signals in the render thread
        connect(m_window, &QQuickWindow::sceneGraphInitialized, this,
                &QQuickVideoOutput::_q_sceneGraphInitialized, Qt::DirectConnection);
        connect(m_window, &QQuickWindow::sceneGraphInvalidated, this,
                &QQuickVideoOutput::_q_invalidateSceneGraph, Qt::DirectConnection);
    }
    initRhiForSink();
}

QSize QQuickVideoOutput::nativeSize() const
{
    return m_videoFormat.viewport().size();
}

void QQuickVideoOutput::updateGeometry()
{
    const QRectF viewport = m_videoFormat.viewport();
    const QSizeF frameSize = m_videoFormat.frameSize();
    const QRectF normalizedViewport(viewport.x() / frameSize.width(),
                                    viewport.y() / frameSize.height(),
                                    viewport.width() / frameSize.width(),
                                    viewport.height() / frameSize.height());
    const QRectF rect(0, 0, width(), height());
    if (nativeSize().isEmpty()) {
        m_renderedRect = rect;
        m_sourceTextureRect = normalizedViewport;
    } else if (m_aspectRatioMode == Qt::IgnoreAspectRatio) {
        m_renderedRect = rect;
        m_sourceTextureRect = normalizedViewport;
    } else if (m_aspectRatioMode == Qt::KeepAspectRatio) {
        m_sourceTextureRect = normalizedViewport;
        m_renderedRect = contentRect();
    } else if (m_aspectRatioMode == Qt::KeepAspectRatioByExpanding) {
        m_renderedRect = rect;
        const qreal contentHeight = contentRect().height();
        const qreal contentWidth = contentRect().width();

        // Calculate the size of the source rectangle without taking the viewport into account
        const qreal relativeOffsetLeft = -contentRect().left() / contentWidth;
        const qreal relativeOffsetTop = -contentRect().top() / contentHeight;
        const qreal relativeWidth = rect.width() / contentWidth;
        const qreal relativeHeight = rect.height() / contentHeight;

        // Now take the viewport size into account
        const qreal totalOffsetLeft = normalizedViewport.x() + relativeOffsetLeft * normalizedViewport.width();
        const qreal totalOffsetTop = normalizedViewport.y() + relativeOffsetTop * normalizedViewport.height();
        const qreal totalWidth = normalizedViewport.width() * relativeWidth;
        const qreal totalHeight = normalizedViewport.height() * relativeHeight;

        if (qIsDefaultAspect(m_frameDisplayingRotation)) {
            m_sourceTextureRect = QRectF(totalOffsetLeft, totalOffsetTop,
                                         totalWidth, totalHeight);
        } else {
            m_sourceTextureRect = QRectF(totalOffsetTop, totalOffsetLeft,
                                         totalHeight, totalWidth);
        }
    }
}

QSGNode *QQuickVideoOutput::updatePaintNode(QSGNode *oldNode,
                                            QQuickItem::UpdatePaintNodeData *data)
{
    Q_UNUSED(data);
    _q_updateGeometry();

    QSGVideoNode *videoNode = static_cast<QSGVideoNode *>(oldNode);

    QMutexLocker lock(&m_frameMutex);

    if (m_frameChanged) {
        if (videoNode && videoNode->pixelFormat() != m_frame.pixelFormat()) {
            qCDebug(qLcVideo) << "updatePaintNode: deleting old video node because frame format changed";
            delete videoNode;
            videoNode = nullptr;
        }

        if (!m_frame.isValid()) {
            qCDebug(qLcVideo) << "updatePaintNode: no frames yet";
            m_frameChanged = false;
            return nullptr;
        }

        if (!videoNode) {
            // Get a node that supports our frame. The surface is irrelevant, our
            // QSGVideoItemSurface supports (logically) anything.
            updateGeometry();
            videoNode = new QSGVideoNode(this, m_videoFormat);
            qCDebug(qLcVideo) << "updatePaintNode: Video node created. Handle type:" << m_frame.handleType();
        }
    }

    if (!videoNode) {
        m_frameChanged = false;
        m_frame = QVideoFrame();
        return nullptr;
    }

    if (m_frameChanged) {
        videoNode->setCurrentFrame(m_frame);

        updateHdr(videoNode);

        // don't keep the frame for more than really necessary
        m_frameChanged = false;
        m_frame = QVideoFrame();
    }

    // Negative rotations need lots of %360
    videoNode->setTexturedRectGeometry(m_renderedRect, m_sourceTextureRect,
                                       qNormalizedOrientation(orientation()));

    return videoNode;
}

void QQuickVideoOutput::updateHdr(QSGVideoNode *videoNode)
{
    auto *videoOutputWindow = window();
    if (!videoOutputWindow)
        return;

    auto *swapChain = videoOutputWindow->swapChain();
    if (!swapChain)
        return;

    const auto requiredSwapChainFormat = qGetRequiredSwapChainFormat(m_frame.surfaceFormat());
    if (qShouldUpdateSwapChainFormat(swapChain, requiredSwapChainFormat)) {
        auto *recreateSwapChainJob = QRunnable::create([swapChain, requiredSwapChainFormat]() {
            swapChain->destroy();
            swapChain->setFormat(requiredSwapChainFormat);
            swapChain->createOrResize();
        });

        // Even though the 'recreate swap chain' job is scheduled for the current frame the
        // effect will be visible only starting from the next frame since the recreation would
        // happen after the actual swap.
        videoOutputWindow->scheduleRenderJob(recreateSwapChainJob, QQuickWindow::AfterSwapStage);
    }

    videoNode->setSurfaceFormat(swapChain->format());
    videoNode->setHdrInfo(swapChain->hdrInfo());
}

QRectF QQuickVideoOutput::adjustedViewport() const
{
    return m_videoFormat.viewport();
}

void QQuickVideoOutput::setFrame(const QVideoFrame &frame)
{
    QMutexLocker lock(&m_frameMutex);

    m_videoFormat = frame.surfaceFormat();
    m_frame = frame;
    m_frameDisplayingRotation = qNormalizedFrameTransformation(frame, m_orientation).rotation;
    m_frameChanged = true;
}

QT_END_NAMESPACE

#include "moc_qquickvideooutput_p.cpp"