/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Copyright (C) 2016 Research In Motion
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include "qdeclarativevideooutput_render_p.h"
#include "qdeclarativevideooutput_p.h"
#include <QtMultimedia/qabstractvideofilter.h>
#include <QtMultimedia/qvideorenderercontrol.h>
#include <QtMultimedia/qmediaservice.h>
#include <QtCore/qloggingcategory.h>
#include <private/qmediapluginloader_p.h>
#include <private/qsgvideonode_p.h>

#include <QtGui/QOpenGLContext>
#include <QtQuick/QQuickWindow>
#include <QtCore/QRunnable>

QT_BEGIN_NAMESPACE

Q_DECLARE_LOGGING_CATEGORY(qLcVideo)

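// Loader for QSGVideoNode factory plugins, looked up under the
// "video/videonode" plugin directory; such plugins can provide scene graph
// nodes for additional frame types beyond the built-in factories below.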
Q_GLOBAL_STATIC_WITH_ARGS(QMediaPluginLoader, videoNodeFactoryLoader,
        (QSGVideoNodeFactoryInterface_iid, QLatin1String("video/videonode"), Qt::CaseInsensitive))
60
61QDeclarativeVideoRendererBackend::QDeclarativeVideoRendererBackend(QDeclarativeVideoOutput *parent)
62 : QDeclarativeVideoBackend(parent),
63 m_glContext(0),
64 m_frameChanged(false)
65{
66 m_surface = new QSGVideoItemSurface(this);
67 QObject::connect(sender: m_surface, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)),
68 receiver: q, SLOT(_q_updateNativeSize()), Qt::QueuedConnection);
69
70 // Prioritize the plugin requested by the environment
71 QString requestedVideoNode = QString::fromLatin1(str: qgetenv(varName: "QT_VIDEONODE"));
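    // For example, running an application with QT_VIDEONODE=egl would move a
    // plugin registered under the key "egl" to the front of the factory list
    // below ("egl" is only an illustrative key; the available keys depend on
    // the videonode plugins installed on the system).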

    const auto keys = videoNodeFactoryLoader()->keys();
    for (const QString &key : keys) {
        QObject *instance = videoNodeFactoryLoader()->instance(key);
        QSGVideoNodeFactoryInterface* plugin = qobject_cast<QSGVideoNodeFactoryInterface*>(instance);
        if (plugin) {
            if (key == requestedVideoNode)
                m_videoNodeFactories.prepend(plugin);
            else
                m_videoNodeFactories.append(plugin);
            qCDebug(qLcVideo) << "found videonode plugin" << key << plugin;
        }
    }

    // Append the built-in node factories last, as fallbacks used when no
    // plugin handles the frame
    m_videoNodeFactories.append(&m_i420Factory);
    m_videoNodeFactories.append(&m_rgbFactory);
    m_videoNodeFactories.append(&m_textureFactory);
}

QDeclarativeVideoRendererBackend::~QDeclarativeVideoRendererBackend()
{
    releaseSource();
    releaseControl();
    delete m_surface;
}

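// Attaches the backend to a media service by requesting its
// QVideoRendererControl and pointing that control at our surface. Returns
// false when the service cannot provide a renderer control.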
bool QDeclarativeVideoRendererBackend::init(QMediaService *service)
{
    // When there is no service, the source is an object with a "videoSurface" property, which
    // doesn't require a QVideoRendererControl and therefore always works
    if (!service)
        return true;

    if (QMediaControl *control = service->requestControl(QVideoRendererControl_iid)) {
        if ((m_rendererControl = qobject_cast<QVideoRendererControl *>(control))) {
            m_rendererControl->setSurface(m_surface);
            m_service = service;
            return true;
        }
    }
    return false;
}

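// Filters are typically installed from QML through VideoOutput's "filters"
// list property, e.g. (illustrative snippet; "myFilter" stands for a
// hypothetical QAbstractVideoFilter subclass registered with the QML engine):
//
//     VideoOutput {
//         source: camera
//         filters: [ myFilter ]
//     }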
void QDeclarativeVideoRendererBackend::appendFilter(QAbstractVideoFilter *filter)
{
    QMutexLocker lock(&m_frameMutex);
    m_filters.append(Filter(filter));
}

void QDeclarativeVideoRendererBackend::clearFilters()
{
    QMutexLocker lock(&m_frameMutex);
    scheduleDeleteFilterResources();
    m_filters.clear();
}

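// Helper job that deletes filter runnables on the scene graph render thread,
// the thread they were created on; it is scheduled below via
// QQuickWindow::scheduleRenderJob().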
class FilterRunnableDeleter : public QRunnable
{
public:
    FilterRunnableDeleter(const QList<QVideoFilterRunnable *> &runnables) : m_runnables(runnables) { }
    void run() override {
        for (QVideoFilterRunnable *runnable : qAsConst(m_runnables))
            delete runnable;
    }
private:
    QList<QVideoFilterRunnable *> m_runnables;
};

void QDeclarativeVideoRendererBackend::scheduleDeleteFilterResources()
{
    if (!q->window())
        return;

    QList<QVideoFilterRunnable *> runnables;
    for (int i = 0; i < m_filters.count(); ++i) {
        if (m_filters[i].runnable) {
            runnables.append(m_filters[i].runnable);
            m_filters[i].runnable = 0;
        }
    }

    if (!runnables.isEmpty()) {
        // Request the scene graph to run our cleanup job on the render thread.
        // The QRunnable may execute after the QML tree, including the
        // QAbstractVideoFilter instance, has been destroyed on the main
        // thread, so the cleanup must not reference the filter.
        q->window()->scheduleRenderJob(new FilterRunnableDeleter(runnables), QQuickWindow::BeforeSynchronizingStage);
    }
}

void QDeclarativeVideoRendererBackend::releaseResources()
{
    // Called on the gui thread when the window is closed or changed.
    invalidateSceneGraph();
}

void QDeclarativeVideoRendererBackend::invalidateSceneGraph()
{
    // Called on the render thread, e.g. when the context is lost.
    QMutexLocker lock(&m_frameMutex);
    for (int i = 0; i < m_filters.count(); ++i) {
        if (m_filters[i].runnable) {
            delete m_filters[i].runnable;
            m_filters[i].runnable = 0;
        }
    }
}

void QDeclarativeVideoRendererBackend::itemChange(QQuickItem::ItemChange change,
                                                  const QQuickItem::ItemChangeData &changeData)
{
    if (change == QQuickItem::ItemSceneChange) {
        if (changeData.window)
            QObject::connect(changeData.window, SIGNAL(sceneGraphInvalidated()),
                             q, SLOT(_q_invalidateSceneGraph()), Qt::DirectConnection);
    }
}

void QDeclarativeVideoRendererBackend::releaseSource()
{
    if (q->source() && q->sourceType() == QDeclarativeVideoOutput::VideoSurfaceSource) {
        if (q->source()->property("videoSurface").value<QAbstractVideoSurface*>() == m_surface)
            q->source()->setProperty("videoSurface", QVariant::fromValue<QAbstractVideoSurface*>(0));
    }

    m_surface->stop();
}

void QDeclarativeVideoRendererBackend::releaseControl()
{
    if (m_rendererControl) {
        m_rendererControl->setSurface(0);
        if (m_service)
            m_service->releaseControl(m_rendererControl);
        m_rendererControl = 0;
    }
}

QSize QDeclarativeVideoRendererBackend::nativeSize() const
{
    return m_surfaceFormat.sizeHint();
}

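// Recomputes the rectangle the frame is drawn into (m_renderedRect) and the
// normalized source rectangle sampled from the frame (m_sourceTextureRect)
// according to the current fill mode:
//  - Stretch: draw into the whole item, sample the whole viewport.
//  - PreserveAspectFit: draw into the letterboxed content rect.
//  - PreserveAspectCrop: draw into the whole item, sample only the sub-rect
//    of the viewport that the item's content rect covers.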
void QDeclarativeVideoRendererBackend::updateGeometry()
{
    const QRectF viewport = m_surfaceFormat.viewport();
    const QSizeF frameSize = m_surfaceFormat.frameSize();
    const QRectF normalizedViewport(viewport.x() / frameSize.width(),
                                    viewport.y() / frameSize.height(),
                                    viewport.width() / frameSize.width(),
                                    viewport.height() / frameSize.height());
    const QRectF rect(0, 0, q->width(), q->height());
    if (nativeSize().isEmpty()) {
        m_renderedRect = rect;
        m_sourceTextureRect = normalizedViewport;
    } else if (q->fillMode() == QDeclarativeVideoOutput::Stretch) {
        m_renderedRect = rect;
        m_sourceTextureRect = normalizedViewport;
    } else if (q->fillMode() == QDeclarativeVideoOutput::PreserveAspectFit) {
        m_sourceTextureRect = normalizedViewport;
        m_renderedRect = q->contentRect();
    } else if (q->fillMode() == QDeclarativeVideoOutput::PreserveAspectCrop) {
        m_renderedRect = rect;
        const qreal contentHeight = q->contentRect().height();
        const qreal contentWidth = q->contentRect().width();

        // Calculate the size of the source rectangle without taking the viewport into account
        const qreal relativeOffsetLeft = -q->contentRect().left() / contentWidth;
        const qreal relativeOffsetTop = -q->contentRect().top() / contentHeight;
        const qreal relativeWidth = rect.width() / contentWidth;
        const qreal relativeHeight = rect.height() / contentHeight;

        // Now take the viewport size into account
        const qreal totalOffsetLeft = normalizedViewport.x() + relativeOffsetLeft * normalizedViewport.width();
        const qreal totalOffsetTop = normalizedViewport.y() + relativeOffsetTop * normalizedViewport.height();
        const qreal totalWidth = normalizedViewport.width() * relativeWidth;
        const qreal totalHeight = normalizedViewport.height() * relativeHeight;

        if (qIsDefaultAspect(q->orientation())) {
            m_sourceTextureRect = QRectF(totalOffsetLeft, totalOffsetTop,
                                         totalWidth, totalHeight);
        } else {
            m_sourceTextureRect = QRectF(totalOffsetTop, totalOffsetLeft,
                                         totalHeight, totalWidth);
        }
    }

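    // The remaining adjustments flip texture coordinates rather than geometry:
    // vertically for bottom-to-top scan lines, and horizontally when the
    // surface format carries a "mirrored" property (commonly set for frames
    // from a front-facing camera; that use is an assumption about the
    // producing backend, not something this file enforces).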
    if (m_surfaceFormat.scanLineDirection() == QVideoSurfaceFormat::BottomToTop) {
        qreal top = m_sourceTextureRect.top();
        m_sourceTextureRect.setTop(m_sourceTextureRect.bottom());
        m_sourceTextureRect.setBottom(top);
    }

    if (m_surfaceFormat.property("mirrored").toBool()) {
        qreal left = m_sourceTextureRect.left();
        m_sourceTextureRect.setLeft(m_sourceTextureRect.right());
        m_sourceTextureRect.setRight(left);
    }
}

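// Runs on the scene graph render thread, with the GUI thread blocked during
// synchronization (the standard QQuickItem::updatePaintNode() contract):
// runs any active filters on the pending frame, then creates or updates the
// QSGVideoNode that draws it.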
QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
                                                           QQuickItem::UpdatePaintNodeData *data)
{
    Q_UNUSED(data);
    QSGVideoNode *videoNode = static_cast<QSGVideoNode *>(oldNode);

    QMutexLocker lock(&m_frameMutex);

    if (!m_glContext) {
        m_glContext = QOpenGLContext::currentContext();
        m_surface->scheduleOpenGLContextUpdate();

        // Internal mechanism to call back the surface renderer from the QtQuick render thread
        QObject *obj = m_surface->property("_q_GLThreadCallback").value<QObject*>();
        if (obj) {
            QEvent ev(QEvent::User);
            obj->event(&ev);
        }
    }

    bool isFrameModified = false;
    if (m_frameChanged) {
        // Run the VideoFilter if there is one. This must be done before potentially changing the videonode below.
        if (m_frame.isValid() && !m_filters.isEmpty()) {
            for (int i = 0; i < m_filters.count(); ++i) {
                QAbstractVideoFilter *filter = m_filters[i].filter;
                QVideoFilterRunnable *&runnable = m_filters[i].runnable;
                if (filter && filter->isActive()) {
                    // Create the filter runnable if not yet done. Ownership is taken and is tied to this thread, on which rendering happens.
                    if (!runnable)
                        runnable = filter->createFilterRunnable();
                    if (!runnable)
                        continue;

                    QVideoFilterRunnable::RunFlags flags;
                    if (i == m_filters.count() - 1)
                        flags |= QVideoFilterRunnable::LastInChain;

                    QVideoFrame newFrame = runnable->run(&m_frame, m_surfaceFormat, flags);

                    if (newFrame.isValid() && newFrame != m_frame) {
                        isFrameModified = true;
                        m_frame = newFrame;
                    }
                }
            }
        }

        if (videoNode && (videoNode->pixelFormat() != m_frame.pixelFormat() || videoNode->handleType() != m_frame.handleType())) {
            qCDebug(qLcVideo) << "updatePaintNode: deleting old video node because frame format changed";
            delete videoNode;
            videoNode = 0;
        }

        if (!m_frame.isValid()) {
            qCDebug(qLcVideo) << "updatePaintNode: no frames yet";
            m_frameChanged = false;
            return 0;
        }

        if (!videoNode) {
            for (QSGVideoNodeFactoryInterface* factory : qAsConst(m_videoNodeFactories)) {
                // Get a node that supports our frame. The surface is irrelevant, our
                // QSGVideoItemSurface supports (logically) anything.
                QVideoSurfaceFormat nodeFormat(m_frame.size(), m_frame.pixelFormat(), m_frame.handleType());
                nodeFormat.setYCbCrColorSpace(m_surfaceFormat.yCbCrColorSpace());
                nodeFormat.setPixelAspectRatio(m_surfaceFormat.pixelAspectRatio());
                nodeFormat.setScanLineDirection(m_surfaceFormat.scanLineDirection());
                nodeFormat.setViewport(m_surfaceFormat.viewport());
                nodeFormat.setFrameRate(m_surfaceFormat.frameRate());
                // Update current surface format if something has changed.
                m_surfaceFormat = nodeFormat;
                videoNode = factory->createNode(nodeFormat);
                if (videoNode) {
                    qCDebug(qLcVideo) << "updatePaintNode: Video node created. Handle type:" << m_frame.handleType()
                                      << " Supported formats for the handle by this node:"
                                      << factory->supportedPixelFormats(m_frame.handleType());
                    break;
                }
            }
        }
    }

    if (!videoNode) {
        m_frameChanged = false;
        m_frame = QVideoFrame();
        return 0;
    }

    // qNormalizedOrientation() folds negative rotations into the 0..359 range.
    videoNode->setTexturedRectGeometry(m_renderedRect, m_sourceTextureRect,
                                       qNormalizedOrientation(q->orientation()));
    if (m_frameChanged) {
        QSGVideoNode::FrameFlags flags;
        if (isFrameModified)
            flags |= QSGVideoNode::FrameFiltered;
        videoNode->setCurrentFrame(m_frame, flags);

        if ((q->flushMode() == QDeclarativeVideoOutput::FirstFrame && !m_frameOnFlush.isValid())
            || q->flushMode() == QDeclarativeVideoOutput::LastFrame) {
            m_frameOnFlush = m_surfaceFormat.handleType() == QAbstractVideoBuffer::NoHandle
                ? m_frame
                : m_frame.image();
        }

        // Don't keep the frame any longer than necessary.
        m_frameChanged = false;
        m_frame = QVideoFrame();
    }
    return videoNode;
}

QAbstractVideoSurface *QDeclarativeVideoRendererBackend::videoSurface() const
{
    return m_surface;
}

QRectF QDeclarativeVideoRendererBackend::adjustedViewport() const
{
    const QRectF viewport = m_surfaceFormat.viewport();
    const QSizeF pixelAspectRatio = m_surfaceFormat.pixelAspectRatio();

    if (pixelAspectRatio.isValid()) {
        const qreal ratio = pixelAspectRatio.width() / pixelAspectRatio.height();
        QRectF result = viewport;
        result.setX(result.x() * ratio);
        result.setWidth(result.width() * ratio);
        return result;
    }

    return viewport;
}

QOpenGLContext *QDeclarativeVideoRendererBackend::glContext() const
{
    return m_glContext;
}

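// Called by the surface for every new frame, potentially from outside the
// GUI thread (which thread depends on the media backend), hence the mutex;
// q->update() then schedules a repaint, which eventually reaches
// updatePaintNode() on the render thread.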
void QDeclarativeVideoRendererBackend::present(const QVideoFrame &frame)
{
    m_frameMutex.lock();
    m_frame = frame.isValid() ? frame : m_frameOnFlush;
    m_frameChanged = true;
    m_frameMutex.unlock();

    q->update();
}

void QDeclarativeVideoRendererBackend::stop()
{
    present(QVideoFrame());
}

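// QSGVideoItemSurface is the QAbstractVideoSurface the media backend renders
// into; it forwards surface formats and frames to the
// QDeclarativeVideoRendererBackend that owns it.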
QSGVideoItemSurface::QSGVideoItemSurface(QDeclarativeVideoRendererBackend *backend, QObject *parent)
    : QAbstractVideoSurface(parent),
      m_backend(backend)
{
}

QSGVideoItemSurface::~QSGVideoItemSurface()
{
}

QList<QVideoFrame::PixelFormat> QSGVideoItemSurface::supportedPixelFormats(
        QAbstractVideoBuffer::HandleType handleType) const
{
    QList<QVideoFrame::PixelFormat> formats;

    static bool noGLTextures = false;
    static bool noGLTexturesChecked = false;
    if (handleType == QAbstractVideoBuffer::GLTextureHandle) {
        if (!noGLTexturesChecked) {
            noGLTexturesChecked = true;
            noGLTextures = qEnvironmentVariableIsSet("QT_QUICK_NO_TEXTURE_VIDEOFRAMES");
        }
        if (noGLTextures)
            return formats;
    }

    for (QSGVideoNodeFactoryInterface* factory : qAsConst(m_backend->m_videoNodeFactories))
        formats.append(factory->supportedPixelFormats(handleType));

    return formats;
}

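// Accepts the proposed format only when one of the node factories can render
// its pixel format for the given handle type.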
bool QSGVideoItemSurface::start(const QVideoSurfaceFormat &format)
{
    qCDebug(qLcVideo) << "Video surface format:" << format << "all supported formats:" << supportedPixelFormats(format.handleType());
    m_backend->m_frameOnFlush = QVideoFrame();

    if (!supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
        return false;

    m_backend->m_surfaceFormat = format;
    return QAbstractVideoSurface::start(format);
}

void QSGVideoItemSurface::stop()
{
    m_backend->stop();
    QAbstractVideoSurface::stop();
}

bool QSGVideoItemSurface::present(const QVideoFrame &frame)
{
    m_backend->present(frame);
    return true;
}

void QSGVideoItemSurface::scheduleOpenGLContextUpdate()
{
    // This method is called from the render thread.
    QMetaObject::invokeMethod(this, "updateOpenGLContext");
}

void QSGVideoItemSurface::updateOpenGLContext()
{
    // Set a dynamic property so the OpenGL context can be accessed from the Qt Quick render thread.
    this->setProperty("GLContext", QVariant::fromValue<QObject*>(m_backend->glContext()));
}

QT_END_NAMESPACE