1 | // Copyright (C) 2021 The Qt Company Ltd. |
2 | // Copyright (C) 2016 Research In Motion |
3 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only |
4 | #include "qquickvideooutput_p.h" |
5 | |
6 | #include <private/qvideooutputorientationhandler_p.h> |
7 | #include <QtMultimedia/qmediaplayer.h> |
8 | #include <QtMultimedia/qmediacapturesession.h> |
9 | #include <private/qfactoryloader_p.h> |
10 | #include <QtCore/qloggingcategory.h> |
11 | #include <qvideosink.h> |
12 | #include <QtQuick/QQuickWindow> |
13 | #include <private/qquickwindow_p.h> |
14 | #include <qsgvideonode_p.h> |
15 | |
16 | QT_BEGIN_NAMESPACE |
17 | |
// Logging category used by this item ("qt.multimedia.video"); enable it to
// trace video node creation/teardown in updatePaintNode().
static Q_LOGGING_CATEGORY(qLcVideo, "qt.multimedia.video" )
19 | |
namespace {

// True when the rotation keeps width/height in their default (non-transposed)
// roles, i.e. the angle is an even multiple of 90 degrees (…,-180, 0, 180,…).
inline bool qIsDefaultAspect(int angle)
{
    return angle % 180 == 0;
}

// Maps any rotation (including negative values) onto the range [0, 359].
inline int qNormalizedOrientation(int angle)
{
    // C++ '%' keeps the sign of the dividend, so add a full turn before the
    // second reduction to make negative inputs come out non-negative.
    return ((angle % 360) + 360) % 360;
}

} // namespace
40 | |
41 | /*! |
42 | \qmltype VideoOutput |
43 | //! \instantiates QQuickVideoOutput |
44 | \brief Render video or camera viewfinder. |
45 | |
46 | \ingroup multimedia_qml |
47 | \ingroup multimedia_video_qml |
48 | \inqmlmodule QtMultimedia |
49 | |
50 | \qml |
51 | |
52 | Rectangle { |
53 | width: 800 |
54 | height: 600 |
55 | color: "black" |
56 | |
57 | MediaPlayer { |
58 | id: player |
59 | source: "file://video.webm" |
60 | videoOutput: videoOutput |
61 | } |
62 | |
63 | VideoOutput { |
64 | id: videoOutput |
65 | anchors.fill: parent |
66 | } |
67 | } |
68 | |
69 | \endqml |
70 | |
71 | The VideoOutput item supports untransformed, stretched, and uniformly scaled video presentation. |
72 | For a description of stretched uniformly scaled presentation, see the \l fillMode property |
73 | description. |
74 | |
75 | \sa MediaPlayer, Camera |
76 | |
77 | \omit |
78 | \section1 Screen Saver |
79 | |
80 | If it is likely that an application will be playing video for an extended |
81 | period of time without user interaction it may be necessary to disable |
82 | the platform's screen saver. The \l ScreenSaver (from \l QtSystemInfo) |
83 | may be used to disable the screensaver in this fashion: |
84 | |
85 | \qml |
86 | import QtSystemInfo |
87 | |
88 | ScreenSaver { screenSaverEnabled: false } |
89 | \endqml |
90 | \endomit |
91 | */ |
92 | |
93 | // TODO: Restore Qt System Info docs when the module is released |
94 | |
95 | /*! |
96 | \internal |
97 | \class QQuickVideoOutput |
98 | \brief The QQuickVideoOutput class provides a video output item. |
99 | */ |
100 | |
101 | QQuickVideoOutput::QQuickVideoOutput(QQuickItem *parent) : |
102 | QQuickItem(parent) |
103 | { |
104 | setFlag(flag: ItemHasContents, enabled: true); |
105 | |
106 | m_sink = new QVideoSink(this); |
107 | qRegisterMetaType<QVideoFrameFormat>(); |
108 | QObject::connect(sender: m_sink, signal: &QVideoSink::videoFrameChanged, context: this, |
109 | slot: [&](const QVideoFrame &frame) { |
110 | setFrame(frame); |
111 | emit frameUpdated(frame.size()); |
112 | }, type: Qt::DirectConnection); |
113 | |
114 | QObject::connect(sender: this, signal: &QQuickVideoOutput::frameUpdated, |
115 | context: this, slot: &QQuickVideoOutput::_q_newFrame); |
116 | |
117 | initRhiForSink(); |
118 | } |
119 | |
/*!
    \internal

    The sink created in the constructor is a QObject child of this item, so
    it is destroyed by the normal parent/child mechanism; nothing to do here.
*/
QQuickVideoOutput::~QQuickVideoOutput()
{
}
123 | |
124 | /*! |
125 | \qmlproperty object QtMultimedia::VideoOutput::videoSink |
126 | |
    This property holds the underlying C++ QVideoSink object that is used
128 | to render the video frames to this VideoOutput element. |
129 | |
130 | Normal usage of VideoOutput from QML should not require using this property. |
131 | */ |
132 | |
// Returns the sink that receives video frames for this item. Created in the
// constructor with this item as parent, so it is non-null for the item's
// whole lifetime.
QVideoSink *QQuickVideoOutput::videoSink() const
{
    return m_sink;
}
137 | |
138 | /*! |
139 | \qmlproperty enumeration QtMultimedia::VideoOutput::fillMode |
140 | |
141 | Set this property to define how the video is scaled to fit the target area. |
142 | |
143 | \list |
144 | \li Stretch - the video is scaled to fit. |
145 | \li PreserveAspectFit - the video is scaled uniformly to fit without cropping |
146 | \li PreserveAspectCrop - the video is scaled uniformly to fill, cropping if necessary |
147 | \endlist |
148 | |
149 | The default fill mode is PreserveAspectFit. |
150 | */ |
151 | |
// Returns the current fill mode. Stored internally as Qt::AspectRatioMode;
// the FillMode enum values map one-to-one onto it.
QQuickVideoOutput::FillMode QQuickVideoOutput::fillMode() const
{
    return FillMode(m_aspectRatioMode);
}
156 | |
157 | void QQuickVideoOutput::setFillMode(FillMode mode) |
158 | { |
159 | if (mode == fillMode()) |
160 | return; |
161 | |
162 | m_aspectRatioMode = Qt::AspectRatioMode(mode); |
163 | |
164 | m_geometryDirty = true; |
165 | update(); |
166 | |
167 | emit fillModeChanged(mode); |
168 | } |
169 | |
170 | void QQuickVideoOutput::_q_newFrame(QSize size) |
171 | { |
172 | update(); |
173 | |
174 | if (!qIsDefaultAspect(o: m_orientation + m_frameOrientation)) { |
175 | size.transpose(); |
176 | } |
177 | |
178 | if (m_nativeSize != size) { |
179 | m_nativeSize = size; |
180 | |
181 | m_geometryDirty = true; |
182 | |
183 | setImplicitWidth(size.width()); |
184 | setImplicitHeight(size.height()); |
185 | |
186 | emit sourceRectChanged(); |
187 | } |
188 | } |
189 | |
190 | /* Based on fill mode and our size, figure out the source/dest rects */ |
191 | void QQuickVideoOutput::_q_updateGeometry() |
192 | { |
193 | const QRectF rect(0, 0, width(), height()); |
194 | const QRectF absoluteRect(x(), y(), width(), height()); |
195 | |
196 | if (!m_geometryDirty && m_lastRect == absoluteRect) |
197 | return; |
198 | |
199 | QRectF oldContentRect(m_contentRect); |
200 | |
201 | m_geometryDirty = false; |
202 | m_lastRect = absoluteRect; |
203 | |
204 | const auto fill = m_aspectRatioMode; |
205 | if (m_nativeSize.isEmpty()) { |
206 | //this is necessary for item to receive the |
207 | //first paint event and configure video surface. |
208 | m_contentRect = rect; |
209 | } else if (fill == Qt::IgnoreAspectRatio) { |
210 | m_contentRect = rect; |
211 | } else { |
212 | QSizeF scaled = m_nativeSize; |
213 | scaled.scale(s: rect.size(), mode: fill); |
214 | |
215 | m_contentRect = QRectF(QPointF(), scaled); |
216 | m_contentRect.moveCenter(p: rect.center()); |
217 | } |
218 | |
219 | updateGeometry(); |
220 | |
221 | if (m_contentRect != oldContentRect) |
222 | emit contentRectChanged(); |
223 | } |
224 | |
225 | /*! |
226 | \qmlproperty int QtMultimedia::VideoOutput::orientation |
227 | |
228 | In some cases the source video stream requires a certain |
229 | orientation to be correct. This includes |
230 | sources like a camera viewfinder, where the displayed |
231 | viewfinder should match reality, no matter what rotation |
232 | the rest of the user interface has. |
233 | |
234 | This property allows you to apply a rotation (in steps |
235 | of 90 degrees) to compensate for any user interface |
236 | rotation, with positive values in the anti-clockwise direction. |
237 | |
238 | The orientation change will also affect the mapping |
239 | of coordinates from source to viewport. |
240 | */ |
// Returns the user-set compensation rotation in degrees (a multiple of 90,
// may be negative or beyond 360 - see setOrientation()).
int QQuickVideoOutput::orientation() const
{
    return m_orientation;
}
245 | |
246 | void QQuickVideoOutput::setOrientation(int orientation) |
247 | { |
248 | // Make sure it's a multiple of 90. |
249 | if (orientation % 90) |
250 | return; |
251 | |
252 | // If there's no actual change, return |
253 | if (m_orientation == orientation) |
254 | return; |
255 | |
256 | // If the new orientation is the same effect |
257 | // as the old one, don't update the video node stuff |
258 | if ((m_orientation % 360) == (orientation % 360)) { |
259 | m_orientation = orientation; |
260 | emit orientationChanged(); |
261 | return; |
262 | } |
263 | |
264 | m_geometryDirty = true; |
265 | |
266 | // Otherwise, a new orientation |
267 | // See if we need to change aspect ratio orientation too |
268 | bool oldAspect = qIsDefaultAspect(o: m_orientation); |
269 | bool newAspect = qIsDefaultAspect(o: orientation); |
270 | |
271 | m_orientation = orientation; |
272 | |
273 | if (oldAspect != newAspect) { |
274 | m_nativeSize.transpose(); |
275 | |
276 | setImplicitWidth(m_nativeSize.width()); |
277 | setImplicitHeight(m_nativeSize.height()); |
278 | |
279 | // Source rectangle does not change for orientation |
280 | } |
281 | |
282 | update(); |
283 | emit orientationChanged(); |
284 | } |
285 | |
286 | /*! |
287 | \qmlproperty rectangle QtMultimedia::VideoOutput::contentRect |
288 | |
289 | This property holds the item coordinates of the area that |
290 | would contain video to render. With certain fill modes, |
291 | this rectangle will be larger than the visible area of the |
292 | \c VideoOutput. |
293 | |
294 | This property is useful when other coordinates are specified |
    in terms of the source dimensions - this applies to relative
296 | (normalized) frame coordinates in the range of 0 to 1.0. |
297 | |
298 | Areas outside this will be transparent. |
299 | */ |
// Returns the cached item-local rectangle that would contain the rendered
// video (maintained by _q_updateGeometry()).
QRectF QQuickVideoOutput::contentRect() const
{
    return m_contentRect;
}
304 | |
305 | /*! |
306 | \qmlproperty rectangle QtMultimedia::VideoOutput::sourceRect |
307 | |
308 | This property holds the area of the source video |
309 | content that is considered for rendering. The |
310 | values are in source pixel coordinates, adjusted for |
311 | the source's pixel aspect ratio. |
312 | |
313 | Note that typically the top left corner of this rectangle |
314 | will be \c {0,0} while the width and height will be the |
315 | width and height of the input content. Only when the video |
316 | source has a viewport set, these values will differ. |
317 | |
318 | The orientation setting does not affect this rectangle. |
319 | |
320 | \sa QVideoFrameFormat::viewport() |
321 | */ |
322 | QRectF QQuickVideoOutput::sourceRect() const |
323 | { |
324 | // We might have to transpose back |
325 | QSizeF size = m_nativeSize; |
326 | if (!size.isValid()) |
327 | return {}; |
328 | |
329 | if (!qIsDefaultAspect(o: m_orientation + m_frameOrientation)) |
330 | size.transpose(); |
331 | |
332 | |
333 | // Take the viewport into account for the top left position. |
334 | // m_nativeSize is already adjusted to the viewport, as it originates |
335 | // from QVideoFrameFormat::viewport(), which includes pixel aspect ratio |
336 | const QRectF viewport = adjustedViewport(); |
337 | Q_ASSERT(viewport.size() == size); |
338 | return QRectF(viewport.topLeft(), size); |
339 | } |
340 | |
341 | void QQuickVideoOutput::geometryChange(const QRectF &newGeometry, const QRectF &oldGeometry) |
342 | { |
343 | Q_UNUSED(newGeometry); |
344 | Q_UNUSED(oldGeometry); |
345 | |
346 | QQuickItem::geometryChange(newGeometry, oldGeometry); |
347 | |
348 | // Explicitly listen to geometry changes here. This is needed since changing the position does |
349 | // not trigger a call to updatePaintNode(). |
350 | // We need to react to position changes though, as the window backened's display rect gets |
351 | // changed in that situation. |
352 | _q_updateGeometry(); |
353 | } |
354 | |
// Slot connected (direct) to QQuickWindow::sceneGraphInvalidated; simply
// forwards to invalidateSceneGraph().
void QQuickVideoOutput::_q_invalidateSceneGraph()
{
    invalidateSceneGraph();
}
359 | |
// Slot connected (direct) to QQuickWindow::sceneGraphInitialized; re-seeds
// the sink with the window's (new) RHI.
void QQuickVideoOutput::_q_sceneGraphInitialized()
{
    initRhiForSink();
}
364 | |
/*!
    \internal

    QQuickItem hook for releasing graphics resources.
*/
void QQuickVideoOutput::releaseResources()
{
    // Called on the gui thread when the window is closed or changed.
    invalidateSceneGraph();
}
370 | |
/*!
    \internal

    Re-initializes the sink's RHI; initRhiForSink() passes nullptr when no
    window (and thus no RHI) is available.
*/
void QQuickVideoOutput::invalidateSceneGraph()
{
    // Called on the render thread, e.g. when the context is lost.
    // QMutexLocker lock(&m_frameMutex);
    initRhiForSink();
}
377 | |
378 | void QQuickVideoOutput::initRhiForSink() |
379 | { |
380 | QRhi *rhi = m_window ? QQuickWindowPrivate::get(c: m_window)->rhi : nullptr; |
381 | m_sink->setRhi(rhi); |
382 | } |
383 | |
384 | void QQuickVideoOutput::itemChange(QQuickItem::ItemChange change, |
385 | const QQuickItem::ItemChangeData &changeData) |
386 | { |
387 | if (change != QQuickItem::ItemSceneChange) |
388 | return; |
389 | |
390 | if (changeData.window == m_window) |
391 | return; |
392 | if (m_window) |
393 | disconnect(receiver: m_window); |
394 | m_window = changeData.window; |
395 | |
396 | if (m_window) { |
397 | // We want to receive the signals in the render thread |
398 | QObject::connect(sender: m_window, signal: &QQuickWindow::sceneGraphInitialized, context: this, slot: &QQuickVideoOutput::_q_sceneGraphInitialized, |
399 | type: Qt::DirectConnection); |
400 | QObject::connect(sender: m_window, signal: &QQuickWindow::sceneGraphInvalidated, |
401 | context: this, slot: &QQuickVideoOutput::_q_invalidateSceneGraph, type: Qt::DirectConnection); |
402 | } |
403 | initRhiForSink(); |
404 | } |
405 | |
// Returns the size of the current surface format's viewport, i.e. the pixel
// size of the frame region that is rendered.
QSize QQuickVideoOutput::nativeSize() const
{
    return m_surfaceFormat.viewport().size();
}
410 | |
/*!
    \internal

    Recomputes the two rectangles consumed by the scene-graph node:
    m_renderedRect (item coordinates the video is drawn into) and
    m_sourceTextureRect (normalized 0..1 texture coordinates of the frame
    region that is sampled), based on the fill mode, the surface format's
    viewport, scan-line direction and mirroring.
*/
void QQuickVideoOutput::updateGeometry()
{
    const QRectF viewport = m_surfaceFormat.viewport();
    const QSizeF frameSize = m_surfaceFormat.frameSize();
    // The viewport expressed as a fraction of the full frame, i.e. in
    // normalized texture coordinates.
    const QRectF normalizedViewport(viewport.x() / frameSize.width(),
                                    viewport.y() / frameSize.height(),
                                    viewport.width() / frameSize.width(),
                                    viewport.height() / frameSize.height());
    const QRectF rect(0, 0, width(), height());
    if (nativeSize().isEmpty()) {
        // No frame size known yet - cover the whole item.
        m_renderedRect = rect;
        m_sourceTextureRect = normalizedViewport;
    } else if (m_aspectRatioMode == Qt::IgnoreAspectRatio) {
        // Stretch: map the whole viewport onto the whole item.
        m_renderedRect = rect;
        m_sourceTextureRect = normalizedViewport;
    } else if (m_aspectRatioMode == Qt::KeepAspectRatio) {
        // Fit: draw the whole viewport into the (letterboxed) content rect.
        m_sourceTextureRect = normalizedViewport;
        m_renderedRect = contentRect();
    } else if (m_aspectRatioMode == Qt::KeepAspectRatioByExpanding) {
        // Crop: draw into the whole item, but sample only the portion of the
        // source that remains visible after expansion.
        m_renderedRect = rect;
        const qreal contentHeight = contentRect().height();
        const qreal contentWidth = contentRect().width();

        // Calculate the size of the source rectangle without taking the viewport into account
        const qreal relativeOffsetLeft = -contentRect().left() / contentWidth;
        const qreal relativeOffsetTop = -contentRect().top() / contentHeight;
        const qreal relativeWidth = rect.width() / contentWidth;
        const qreal relativeHeight = rect.height() / contentHeight;

        // Now take the viewport size into account
        const qreal totalOffsetLeft = normalizedViewport.x() + relativeOffsetLeft * normalizedViewport.width();
        const qreal totalOffsetTop = normalizedViewport.y() + relativeOffsetTop * normalizedViewport.height();
        const qreal totalWidth = normalizedViewport.width() * relativeWidth;
        const qreal totalHeight = normalizedViewport.height() * relativeHeight;

        if (qIsDefaultAspect(o: orientation() + m_frameOrientation)) {
            m_sourceTextureRect = QRectF(totalOffsetLeft, totalOffsetTop,
                                         totalWidth, totalHeight);
        } else {
            // 90/270 degree rotation: the texture rect axes are swapped.
            m_sourceTextureRect = QRectF(totalOffsetTop, totalOffsetLeft,
                                         totalHeight, totalWidth);
        }
    }

    // Bottom-up scan lines: flip the texture rect vertically.
    if (m_surfaceFormat.scanLineDirection() == QVideoFrameFormat::BottomToTop) {
        qreal top = m_sourceTextureRect.top();
        m_sourceTextureRect.setTop(m_sourceTextureRect.bottom());
        m_sourceTextureRect.setBottom(top);
    }

    // Mirrored source: flip the texture rect horizontally.
    if (m_surfaceFormat.isMirrored()) {
        qreal left = m_sourceTextureRect.left();
        m_sourceTextureRect.setLeft(m_sourceTextureRect.right());
        m_sourceTextureRect.setRight(left);
    }
}
467 | |
/*!
    \internal

    Scene-graph update (render thread, GUI thread blocked). Creates or
    recreates the QSGVideoNode as needed, hands it the most recent frame,
    and positions it using the geometry computed by updateGeometry().
    Returns nullptr (no node) while no valid frame has arrived yet.
*/
QSGNode *QQuickVideoOutput::updatePaintNode(QSGNode *oldNode,
                                            QQuickItem::UpdatePaintNodeData *data)
{
    Q_UNUSED(data);
    _q_updateGeometry();

    QSGVideoNode *videoNode = static_cast<QSGVideoNode *>(oldNode);

    // Guards m_frame / m_frameChanged / m_surfaceFormat against concurrent
    // setFrame() calls coming from the sink's thread.
    QMutexLocker lock(&m_frameMutex);

    if (m_frameChanged) {
        // A pixel-format change invalidates the node's material - rebuild it.
        if (videoNode && videoNode->pixelFormat() != m_frame.pixelFormat()) {
            qCDebug(qLcVideo) << "updatePaintNode: deleting old video node because frame format changed" ;
            delete videoNode;
            videoNode = nullptr;
        }

        if (!m_frame.isValid()) {
            qCDebug(qLcVideo) << "updatePaintNode: no frames yet" ;
            m_frameChanged = false;
            return nullptr;
        }

        if (!videoNode) {
            // Get a node that supports our frame. The surface is irrelevant, our
            // QSGVideoItemSurface supports (logically) anything.
            updateGeometry();
            videoNode = new QSGVideoNode(this, m_surfaceFormat);
            qCDebug(qLcVideo) << "updatePaintNode: Video node created. Handle type:" << m_frame.handleType();
        }
    }

    if (!videoNode) {
        m_frameChanged = false;
        m_frame = QVideoFrame();
        return nullptr;
    }

    if (m_frameChanged) {
        videoNode->setCurrentFrame(m_frame);

        //don't keep the frame for more than really necessary
        m_frameChanged = false;
        m_frame = QVideoFrame();
    }

    // Negative rotations need lots of %360
    videoNode->setTexturedRectGeometry(boundingRect: m_renderedRect, textureRect: m_sourceTextureRect,
                                       orientation: qNormalizedOrientation(o: orientation()));

    return videoNode;
}
520 | |
// Returns the surface format's viewport rectangle; used by sourceRect() as
// the (pixel-aspect-ratio adjusted) source area.
QRectF QQuickVideoOutput::adjustedViewport() const
{
    return m_surfaceFormat.viewport();
}
525 | |
526 | void QQuickVideoOutput::setFrame(const QVideoFrame &frame) |
527 | { |
528 | m_frameMutex.lock(); |
529 | m_surfaceFormat = frame.surfaceFormat(); |
530 | m_frame = frame; |
531 | m_frameOrientation = frame.rotationAngle(); |
532 | m_frameChanged = true; |
533 | m_frameMutex.unlock(); |
534 | } |
535 | |
/*!
    \internal

    Blanks the output: replaces the current frame with a default-constructed
    (invalid) one and schedules a repaint.
*/
void QQuickVideoOutput::stop()
{
    setFrame({});
    update();
}
541 | |
542 | QT_END_NAMESPACE |
543 | |
544 | #include "moc_qquickvideooutput_p.cpp" |
545 | |