1 | // Copyright (C) 2021 The Qt Company Ltd. |
2 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only |
3 | |
#include "qsgvideonode_p.h"
#include "qsgvideotexture_p.h"

#include <QtQuick/qsgmaterial.h>
#include <QtMultimedia/private/qvideotexturehelper_p.h>
#include <private/qhwvideobuffer_p.h>
#include <private/qquickitem_p.h>
#include <private/qquickvideooutput_p.h>
#include <private/qsginternaltextnode_p.h>

#include <utility>
12 | |
13 | QT_BEGIN_NAMESPACE |
14 | |
15 | /* Helpers */ |
16 | static inline void qSetGeom(QSGGeometry::TexturedPoint2D *v, const QPointF &p) |
17 | { |
18 | v->x = p.x(); |
19 | v->y = p.y(); |
20 | } |
21 | |
22 | static inline void qSetTex(QSGGeometry::TexturedPoint2D *v, const QPointF &p) |
23 | { |
24 | v->tx = p.x(); |
25 | v->ty = p.y(); |
26 | } |
27 | |
28 | static inline void qSwapTex(QSGGeometry::TexturedPoint2D *v0, QSGGeometry::TexturedPoint2D *v1) |
29 | { |
30 | auto tvx = v0->tx; |
31 | auto tvy = v0->ty; |
32 | v0->tx = v1->tx; |
33 | v0->ty = v1->ty; |
34 | v1->tx = tvx; |
35 | v1->ty = tvy; |
36 | } |
37 | |
38 | class QSGVideoMaterial; |
39 | |
40 | class QSGVideoMaterialRhiShader : public QSGMaterialShader |
41 | { |
42 | public: |
43 | QSGVideoMaterialRhiShader(const QVideoFrameFormat &videoFormat, |
44 | const QRhiSwapChain::Format surfaceFormat, |
45 | const QRhiSwapChainHdrInfo &hdrInfo) |
46 | : m_videoFormat(videoFormat) |
47 | , m_surfaceFormat(surfaceFormat) |
48 | , m_hdrInfo(hdrInfo) |
49 | { |
50 | setShaderFileName(stage: VertexStage, filename: QVideoTextureHelper::vertexShaderFileName(format: m_videoFormat)); |
51 | setShaderFileName( |
52 | stage: FragmentStage, |
53 | filename: QVideoTextureHelper::fragmentShaderFileName(format: m_videoFormat, surfaceFormat: m_surfaceFormat)); |
54 | } |
55 | |
56 | bool updateUniformData(RenderState &state, QSGMaterial *newMaterial, |
57 | QSGMaterial *oldMaterial) override; |
58 | |
59 | void updateSampledImage(RenderState &state, int binding, QSGTexture **texture, |
60 | QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override; |
61 | |
62 | protected: |
63 | QVideoFrameFormat m_videoFormat; |
64 | QRhiSwapChain::Format m_surfaceFormat; |
65 | QRhiSwapChainHdrInfo m_hdrInfo; |
66 | }; |
67 | |
68 | class QSGVideoMaterial : public QSGMaterial |
69 | { |
70 | public: |
71 | QSGVideoMaterial(const QVideoFrameFormat &videoFormat); |
72 | |
73 | [[nodiscard]] QSGMaterialType *type() const override { |
74 | static constexpr int NFormats = QRhiSwapChain::HDRExtendedDisplayP3Linear + 1; |
75 | static QSGMaterialType type[QVideoFrameFormat::NPixelFormats][NFormats]; |
76 | return &type[m_videoFormat.pixelFormat()][m_surfaceFormat]; |
77 | } |
78 | |
79 | [[nodiscard]] QSGMaterialShader *createShader(QSGRendererInterface::RenderMode) const override { |
80 | return new QSGVideoMaterialRhiShader(m_videoFormat, m_surfaceFormat, m_hdrInfo); |
81 | } |
82 | |
83 | int compare(const QSGMaterial *other) const override { |
84 | const QSGVideoMaterial *m = static_cast<const QSGVideoMaterial *>(other); |
85 | |
86 | qint64 diff = m_textures[0].comparisonKey() - m->m_textures[0].comparisonKey(); |
87 | if (!diff) |
88 | diff = m_textures[1].comparisonKey() - m->m_textures[1].comparisonKey(); |
89 | if (!diff) |
90 | diff = m_textures[2].comparisonKey() - m->m_textures[2].comparisonKey(); |
91 | |
92 | return diff < 0 ? -1 : (diff > 0 ? 1 : 0); |
93 | } |
94 | |
95 | void updateBlending() { |
96 | // ### respect video formats with Alpha |
97 | setFlag(flags: Blending, on: !qFuzzyCompare(p1: m_opacity, p2: float(1.0))); |
98 | } |
99 | |
100 | void setCurrentFrame(const QVideoFrame &frame) { |
101 | m_currentFrame = frame; |
102 | m_texturesDirty = true; |
103 | } |
104 | |
105 | void setSurfaceFormat(const QRhiSwapChain::Format surfaceFormat) |
106 | { |
107 | m_surfaceFormat = surfaceFormat; |
108 | } |
109 | |
110 | void setHdrInfo(const QRhiSwapChainHdrInfo &hdrInfo) |
111 | { |
112 | m_hdrInfo = hdrInfo; |
113 | } |
114 | |
115 | void updateTextures(QRhi *rhi, QRhiResourceUpdateBatch *resourceUpdates); |
116 | |
117 | QVideoFrameFormat m_videoFormat; |
118 | QRhiSwapChain::Format m_surfaceFormat = QRhiSwapChain::SDR; |
119 | float m_opacity = 1.0f; |
120 | QRhiSwapChainHdrInfo m_hdrInfo; |
121 | |
122 | bool m_texturesDirty = false; |
123 | QVideoFrame m_currentFrame; |
124 | |
125 | enum { NVideoFrameSlots = 4 }; |
126 | QVideoFrame m_videoFrameSlots[NVideoFrameSlots]; |
127 | std::array<QSGVideoTexture, 3> m_textures; |
128 | std::unique_ptr<QVideoFrameTextures> m_videoFrameTextures; |
129 | }; |
130 | |
131 | void QSGVideoMaterial::updateTextures(QRhi *rhi, QRhiResourceUpdateBatch *resourceUpdates) |
132 | { |
133 | if (!m_texturesDirty) |
134 | return; |
135 | |
136 | // keep the video frames alive until we know that they are not needed anymore |
137 | Q_ASSERT(NVideoFrameSlots >= rhi->resourceLimit(QRhi::FramesInFlight)); |
138 | m_videoFrameSlots[rhi->currentFrameSlot()] = m_currentFrame; |
139 | |
140 | // update and upload all textures |
141 | m_videoFrameTextures = QVideoTextureHelper::createTextures(frame&: m_currentFrame, rhi, rub: resourceUpdates, oldTextures: std::move(m_videoFrameTextures)); |
142 | if (!m_videoFrameTextures) |
143 | return; |
144 | |
145 | for (int plane = 0; plane < 3; ++plane) |
146 | m_textures[plane].setRhiTexture(m_videoFrameTextures->texture(plane)); |
147 | m_texturesDirty = false; |
148 | } |
149 | |
150 | |
151 | bool QSGVideoMaterialRhiShader::updateUniformData(RenderState &state, QSGMaterial *newMaterial, |
152 | QSGMaterial *oldMaterial) |
153 | { |
154 | Q_UNUSED(oldMaterial); |
155 | |
156 | auto m = static_cast<QSGVideoMaterial *>(newMaterial); |
157 | |
158 | if (!state.isMatrixDirty() && !state.isOpacityDirty()) |
159 | return false; |
160 | |
161 | if (state.isOpacityDirty()) { |
162 | m->m_opacity = state.opacity(); |
163 | m->updateBlending(); |
164 | } |
165 | |
166 | // Do this here, not in updateSampledImage. First, with multiple textures we want to |
167 | // do this once. More importantly, on some platforms (Android) the externalMatrix is |
168 | // updated by this function and we need that already in updateUniformData. |
169 | m->updateTextures(rhi: state.rhi(), resourceUpdates: state.resourceUpdateBatch()); |
170 | |
171 | float maxNits = 100; // Default to de-facto SDR nits |
172 | if (m_surfaceFormat == QRhiSwapChain::HDRExtendedSrgbLinear) { |
173 | if (m_hdrInfo.limitsType == QRhiSwapChainHdrInfo::ColorComponentValue) |
174 | maxNits = 100 * m_hdrInfo.limits.colorComponentValue.maxColorComponentValue; |
175 | else |
176 | maxNits = m_hdrInfo.limits.luminanceInNits.maxLuminance; |
177 | } |
178 | |
179 | QVideoTextureHelper::updateUniformData(dst: state.uniformData(), format: m_videoFormat, |
180 | frame: m->m_currentFrame, transform: state.combinedMatrix(), opacity: state.opacity(), maxNits); |
181 | |
182 | return true; |
183 | } |
184 | |
185 | void QSGVideoMaterialRhiShader::updateSampledImage(RenderState &state, int binding, QSGTexture **texture, |
186 | QSGMaterial *newMaterial, QSGMaterial *oldMaterial) |
187 | { |
188 | Q_UNUSED(state); |
189 | Q_UNUSED(oldMaterial); |
190 | if (binding < 1 || binding > 3) |
191 | return; |
192 | |
193 | auto m = static_cast<QSGVideoMaterial *>(newMaterial); |
194 | *texture = &m->m_textures[binding - 1]; |
195 | } |
196 | |
197 | QSGVideoMaterial::QSGVideoMaterial(const QVideoFrameFormat &videoFormat) |
198 | : m_videoFormat(videoFormat) |
199 | { |
200 | setFlag(flags: Blending, on: false); |
201 | } |
202 | |
203 | QSGVideoNode::QSGVideoNode(QQuickVideoOutput *parent, const QVideoFrameFormat &videoFormat) |
204 | : m_parent(parent), m_videoFormat(videoFormat) |
205 | { |
206 | setFlag(QSGNode::OwnsMaterial); |
207 | setFlag(QSGNode::OwnsGeometry); |
208 | m_material = new QSGVideoMaterial(videoFormat); |
209 | setMaterial(m_material); |
210 | } |
211 | |
QSGVideoNode::~QSGVideoNode()
{
    // Explicitly free the subtitle node allocated in updateSubtitle(); it is
    // not covered by the OwnsMaterial/OwnsGeometry flags set in the ctor.
    delete m_subtitleTextNode;
}
216 | |
217 | void QSGVideoNode::setCurrentFrame(const QVideoFrame &frame) |
218 | { |
219 | m_material->setCurrentFrame(frame); |
220 | markDirty(bits: DirtyMaterial); |
221 | updateSubtitle(frame); |
222 | } |
223 | |
224 | void QSGVideoNode::setSurfaceFormat(const QRhiSwapChain::Format surfaceFormat) |
225 | { |
226 | m_material->setSurfaceFormat(surfaceFormat); |
227 | markDirty(bits: DirtyMaterial); |
228 | } |
229 | |
230 | void QSGVideoNode::setHdrInfo(const QRhiSwapChainHdrInfo &hdrInfo) |
231 | { |
232 | m_material->setHdrInfo(hdrInfo); |
233 | markDirty(bits: DirtyMaterial); |
234 | } |
235 | |
236 | void QSGVideoNode::updateSubtitle(const QVideoFrame &frame) |
237 | { |
238 | QSize subtitleFrameSize = m_rect.size().toSize(); |
239 | if (subtitleFrameSize.isEmpty()) |
240 | return; |
241 | |
242 | subtitleFrameSize = qRotatedFrameSize(size: subtitleFrameSize, rotation: m_orientation); |
243 | |
244 | if (!m_subtitleLayout.update(frameSize: subtitleFrameSize, text: frame.subtitleText())) |
245 | return; |
246 | |
247 | delete m_subtitleTextNode; |
248 | m_subtitleTextNode = nullptr; |
249 | if (frame.subtitleText().isEmpty()) |
250 | return; |
251 | |
252 | QQuickItemPrivate *parent_d = QQuickItemPrivate::get(item: m_parent); |
253 | |
254 | m_subtitleTextNode = parent_d->sceneGraphContext()->createInternalTextNode(renderContext: parent_d->sceneGraphRenderContext()); |
255 | m_subtitleTextNode->setColor(Qt::white); |
256 | QColor bgColor = Qt::black; |
257 | bgColor.setAlpha(128); |
258 | m_subtitleTextNode->addRectangleNode(rect: m_subtitleLayout.bounds, color: bgColor); |
259 | m_subtitleTextNode->addTextLayout(position: m_subtitleLayout.layout.position(), layout: &m_subtitleLayout.layout); |
260 | appendChildNode(node: m_subtitleTextNode); |
261 | setSubtitleGeometry(); |
262 | } |
263 | |
264 | void QSGVideoNode::setSubtitleGeometry() |
265 | { |
266 | if (!m_subtitleTextNode) |
267 | return; |
268 | |
269 | if (m_material) |
270 | updateSubtitle(frame: m_material->m_currentFrame); |
271 | |
272 | float rotate = -1.f * m_orientation; |
273 | float yTranslate = 0; |
274 | float xTranslate = 0; |
275 | if (m_orientation == 90) { |
276 | yTranslate = m_rect.height(); |
277 | } else if (m_orientation == 180) { |
278 | yTranslate = m_rect.height(); |
279 | xTranslate = m_rect.width(); |
280 | } else if (m_orientation == 270) { |
281 | xTranslate = m_rect.width(); |
282 | } |
283 | |
284 | QMatrix4x4 transform; |
285 | transform.translate(x: m_rect.x() + xTranslate, y: m_rect.y() + yTranslate); |
286 | transform.rotate(angle: rotate, x: 0, y: 0, z: 1); |
287 | |
288 | m_subtitleTextNode->setMatrix(transform); |
289 | m_subtitleTextNode->markDirty(bits: DirtyGeometry); |
290 | } |
291 | |
292 | /* Update the vertices and texture coordinates. Orientation must be in {0,90,180,270} */ |
293 | void QSGVideoNode::setTexturedRectGeometry(const QRectF &rect, const QRectF &textureRect, int orientation) |
294 | { |
295 | const VideoTransformation currentFrameTransformation = |
296 | qNormalizedFrameTransformation(frame: m_material ? m_material->m_currentFrame : QVideoFrame{}); |
297 | |
298 | if (rect == m_rect && textureRect == m_textureRect && orientation == m_orientation |
299 | && currentFrameTransformation == m_frameTransformation) |
300 | return; |
301 | |
302 | m_rect = rect; |
303 | m_textureRect = textureRect; |
304 | m_orientation = orientation; |
305 | m_frameTransformation = currentFrameTransformation; |
306 | |
307 | QSGGeometry *g = geometry(); |
308 | |
309 | if (g == nullptr) |
310 | g = new QSGGeometry(QSGGeometry::defaultAttributes_TexturedPoint2D(), 4); |
311 | |
312 | QSGGeometry::TexturedPoint2D *v = g->vertexDataAsTexturedPoint2D(); |
313 | |
314 | // Vertexes: |
315 | // 0 2 |
316 | // |
317 | // 1 3 |
318 | |
319 | // Set geometry first |
320 | qSetGeom(v: v + 0, p: rect.topLeft()); |
321 | qSetGeom(v: v + 1, p: rect.bottomLeft()); |
322 | qSetGeom(v: v + 2, p: rect.topRight()); |
323 | qSetGeom(v: v + 3, p: rect.bottomRight()); |
324 | |
325 | // and then texture coordinates |
326 | switch (currentFrameTransformation.rotation) { |
327 | default: |
328 | // tl, bl, tr, br |
329 | qSetTex(v: v + 0, p: textureRect.topLeft()); |
330 | qSetTex(v: v + 1, p: textureRect.bottomLeft()); |
331 | qSetTex(v: v + 2, p: textureRect.topRight()); |
332 | qSetTex(v: v + 3, p: textureRect.bottomRight()); |
333 | break; |
334 | |
335 | case QtVideo::Rotation::Clockwise90: |
336 | // bl, br, tl, tr |
337 | qSetTex(v: v + 0, p: textureRect.bottomLeft()); |
338 | qSetTex(v: v + 1, p: textureRect.bottomRight()); |
339 | qSetTex(v: v + 2, p: textureRect.topLeft()); |
340 | qSetTex(v: v + 3, p: textureRect.topRight()); |
341 | break; |
342 | |
343 | case QtVideo::Rotation::Clockwise180: |
344 | // br, tr, bl, tl |
345 | qSetTex(v: v + 0, p: textureRect.bottomRight()); |
346 | qSetTex(v: v + 1, p: textureRect.topRight()); |
347 | qSetTex(v: v + 2, p: textureRect.bottomLeft()); |
348 | qSetTex(v: v + 3, p: textureRect.topLeft()); |
349 | break; |
350 | |
351 | case QtVideo::Rotation::Clockwise270: |
352 | // tr, tl, br, bl |
353 | qSetTex(v: v + 0, p: textureRect.topRight()); |
354 | qSetTex(v: v + 1, p: textureRect.topLeft()); |
355 | qSetTex(v: v + 2, p: textureRect.bottomRight()); |
356 | qSetTex(v: v + 3, p: textureRect.bottomLeft()); |
357 | break; |
358 | } |
359 | |
360 | if (m_frameTransformation.mirrorredHorizontallyAfterRotation) { |
361 | qSwapTex(v0: v + 0, v1: v + 2); |
362 | qSwapTex(v0: v + 1, v1: v + 3); |
363 | } |
364 | |
365 | if (!geometry()) |
366 | setGeometry(g); |
367 | |
368 | markDirty(bits: DirtyGeometry); |
369 | |
370 | setSubtitleGeometry(); |
371 | } |
372 | |
373 | QT_END_NAMESPACE |
374 | |