/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qsgvideonode_yuv_p.h"
#include <QtCore/qmutex.h>
#include <QtQuick/qsgtexturematerial.h>
#include <QtQuick/qsgmaterial.h>
#include <QtGui/QOpenGLContext>
#include <QtGui/QOpenGLFunctions>
#include <QtGui/QOpenGLShaderProgram>

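// Fallback definitions for GL tokens that may be missing from older GL/GLES2
// headers; the values below are the standard OpenGL enum values.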
#ifndef GL_RED
#define GL_RED 0x1903
#endif
#ifndef GL_GREEN
#define GL_GREEN 0x1904
#endif
#ifndef GL_RG
#define GL_RG 0x8227
#endif
#ifndef GL_TEXTURE_SWIZZLE_R
#define GL_TEXTURE_SWIZZLE_R 0x8E42
#endif
#ifndef GL_TEXTURE_SWIZZLE_G
#define GL_TEXTURE_SWIZZLE_G 0x8E43
#endif
#ifndef GL_TEXTURE_SWIZZLE_B
#define GL_TEXTURE_SWIZZLE_B 0x8E44
#endif
#ifndef GL_TEXTURE_SWIZZLE_A
#define GL_TEXTURE_SWIZZLE_A 0x8E45
#endif

QT_BEGIN_NAMESPACE

QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_YUV::supportedPixelFormats(
        QAbstractVideoBuffer::HandleType handleType) const
{
    QList<QVideoFrame::PixelFormat> formats;

    if (handleType == QAbstractVideoBuffer::NoHandle) {
        formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12 << QVideoFrame::Format_YUV422P
                << QVideoFrame::Format_NV12 << QVideoFrame::Format_NV21
                << QVideoFrame::Format_UYVY << QVideoFrame::Format_YUYV;
    }

    return formats;
}

QSGVideoNode *QSGVideoNodeFactory_YUV::createNode(const QVideoSurfaceFormat &format)
{
    if (supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
        return new QSGVideoNode_YUV(format);

    return 0;
}


class QSGVideoMaterialShader_YUV_BiPlanar : public QSGMaterialShader
{
public:
    QSGVideoMaterialShader_YUV_BiPlanar()
        : QSGMaterialShader()
    {
        setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/biplanaryuvvideo.vert"));
        setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/biplanaryuvvideo.frag"));
    }

    void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override;

    char const *const *attributeNames() const override {
        static const char *names[] = {
            "qt_VertexPosition",
            "qt_VertexTexCoord",
            0
        };
        return names;
    }

protected:
    void initialize() override {
        m_id_matrix = program()->uniformLocation("qt_Matrix");
        m_id_plane1Width = program()->uniformLocation("plane1Width");
        m_id_plane2Width = program()->uniformLocation("plane2Width");
        m_id_plane1Texture = program()->uniformLocation("plane1Texture");
        m_id_plane2Texture = program()->uniformLocation("plane2Texture");
        m_id_colorMatrix = program()->uniformLocation("colorMatrix");
        m_id_opacity = program()->uniformLocation("opacity");
    }

    int m_id_matrix;
    int m_id_plane1Width;
    int m_id_plane2Width;
    int m_id_plane1Texture;
    int m_id_plane2Texture;
    int m_id_colorMatrix;
    int m_id_opacity;
};

class QSGVideoMaterialShader_UYVY : public QSGMaterialShader
{
public:
    QSGVideoMaterialShader_UYVY()
        : QSGMaterialShader()
    {
        setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/monoplanarvideo.vert"));
        setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/uyvyvideo.frag"));
    }

    void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override;

    char const *const *attributeNames() const override {
        static const char *names[] = {
            "qt_VertexPosition",
            "qt_VertexTexCoord",
            0
        };
        return names;
    }

protected:
    void initialize() override {
        m_id_matrix = program()->uniformLocation("qt_Matrix");
        m_id_yTexture = program()->uniformLocation("yTexture");
        m_id_uvTexture = program()->uniformLocation("uvTexture");
        m_id_colorMatrix = program()->uniformLocation("colorMatrix");
        m_id_opacity = program()->uniformLocation("opacity");
        QSGMaterialShader::initialize();
    }

    int m_id_matrix;
    int m_id_yTexture;
    int m_id_uvTexture;
    int m_id_colorMatrix;
    int m_id_opacity;
};


class QSGVideoMaterialShader_YUYV : public QSGVideoMaterialShader_UYVY
{
public:
    QSGVideoMaterialShader_YUYV()
        : QSGVideoMaterialShader_UYVY()
    {
        setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/yuyvvideo.frag"));
    }
};


class QSGVideoMaterialShader_YUV_BiPlanar_swizzle : public QSGVideoMaterialShader_YUV_BiPlanar
{
public:
    QSGVideoMaterialShader_YUV_BiPlanar_swizzle()
        : QSGVideoMaterialShader_YUV_BiPlanar()
    {
        setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/biplanaryuvvideo_swizzle.frag"));
    }
};


class QSGVideoMaterialShader_YUV_TriPlanar : public QSGVideoMaterialShader_YUV_BiPlanar
{
public:
    QSGVideoMaterialShader_YUV_TriPlanar()
        : QSGVideoMaterialShader_YUV_BiPlanar()
    {
        setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/triplanaryuvvideo.vert"));
        setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/triplanaryuvvideo.frag"));
    }

    void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override;

protected:
    void initialize() override {
        m_id_plane3Width = program()->uniformLocation("plane3Width");
        m_id_plane3Texture = program()->uniformLocation("plane3Texture");
        QSGVideoMaterialShader_YUV_BiPlanar::initialize();
    }

    int m_id_plane3Width;
    int m_id_plane3Texture;
};


class QSGVideoMaterial_YUV : public QSGMaterial
{
public:
    QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format);
    ~QSGVideoMaterial_YUV();

    QSGMaterialType *type() const override {
        static QSGMaterialType biPlanarType, biPlanarSwizzleType, triPlanarType, uyvyType, yuyvType;

        switch (m_format.pixelFormat()) {
        case QVideoFrame::Format_NV12:
            return &biPlanarType;
        case QVideoFrame::Format_NV21:
            return &biPlanarSwizzleType;
        case QVideoFrame::Format_UYVY:
            return &uyvyType;
        case QVideoFrame::Format_YUYV:
            return &yuyvType;
        default: // Currently: YUV420P, YUV422P and YV12
            return &triPlanarType;
        }
    }

    QSGMaterialShader *createShader() const override {
        switch (m_format.pixelFormat()) {
        case QVideoFrame::Format_NV12:
            return new QSGVideoMaterialShader_YUV_BiPlanar;
        case QVideoFrame::Format_NV21:
            return new QSGVideoMaterialShader_YUV_BiPlanar_swizzle;
        case QVideoFrame::Format_UYVY:
            return new QSGVideoMaterialShader_UYVY;
        case QVideoFrame::Format_YUYV:
            return new QSGVideoMaterialShader_YUYV;
        default: // Currently: YUV420P, YUV422P and YV12
            return new QSGVideoMaterialShader_YUV_TriPlanar;
        }
    }

    int compare(const QSGMaterial *other) const override {
        const QSGVideoMaterial_YUV *m = static_cast<const QSGVideoMaterial_YUV *>(other);
        if (!m_textureIds[0])
            return 1;

        int d = m_textureIds[0] - m->m_textureIds[0];
        if (d)
            return d;
        else if ((d = m_textureIds[1] - m->m_textureIds[1]) != 0)
            return d;
        else
            return m_textureIds[2] - m->m_textureIds[2];
    }

    void updateBlending() {
        setFlag(Blending, qFuzzyCompare(m_opacity, qreal(1.0)) ? false : true);
    }

    void setCurrentFrame(const QVideoFrame &frame) {
        QMutexLocker lock(&m_frameMutex);
        m_frame = frame;
    }

    void bind();
    void bindTexture(int id, int w, int h, const uchar *bits, GLenum format);

    QVideoSurfaceFormat m_format;
    QSize m_textureSize;
    int m_planeCount;

    GLuint m_textureIds[3];
    GLfloat m_planeWidth[3];

    qreal m_opacity;
    QMatrix4x4 m_colorMatrix;

    QVideoFrame m_frame;
    QMutex m_frameMutex;
};

QSGVideoMaterial_YUV::QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format) :
    m_format(format),
    m_opacity(1.0)
{
    memset(m_textureIds, 0, sizeof(m_textureIds));

    switch (format.pixelFormat()) {
    case QVideoFrame::Format_NV12:
    case QVideoFrame::Format_NV21:
        m_planeCount = 2;
        break;
    case QVideoFrame::Format_YUV420P:
    case QVideoFrame::Format_YV12:
    case QVideoFrame::Format_YUV422P:
        m_planeCount = 3;
        break;
    case QVideoFrame::Format_UYVY:
    case QVideoFrame::Format_YUYV:
    default:
        m_planeCount = 2;
        break;
    }

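    // Select the YCbCr -> RGB conversion matrix for the frame's color space.
    // The fourth column folds the fixed offsets (the -16 luma bias and the
    // -128 chroma bias, normalized to [0, 1]) into the matrix, so the shaders
    // can convert with a single mat4 * vec4 multiply.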
    switch (format.yCbCrColorSpace()) {
    case QVideoSurfaceFormat::YCbCr_JPEG:
        m_colorMatrix = QMatrix4x4(
                    1.0f,  0.000f,  1.402f, -0.701f,
                    1.0f, -0.344f, -0.714f,  0.529f,
                    1.0f,  1.772f,  0.000f, -0.886f,
                    0.0f,  0.000f,  0.000f,  1.0000f);
        break;
    case QVideoSurfaceFormat::YCbCr_BT709:
    case QVideoSurfaceFormat::YCbCr_xvYCC709:
        m_colorMatrix = QMatrix4x4(
                    1.164f,  0.000f,  1.793f, -0.5727f,
                    1.164f, -0.534f, -0.213f,  0.3007f,
                    1.164f,  2.115f,  0.000f, -1.1302f,
                    0.0f,    0.000f,  0.000f,  1.0000f);
        break;
    default: //BT 601:
        m_colorMatrix = QMatrix4x4(
                    1.164f,  0.000f,  1.596f, -0.8708f,
                    1.164f, -0.392f, -0.813f,  0.5296f,
                    1.164f,  2.017f,  0.000f, -1.081f,
                    0.0f,    0.000f,  0.000f,  1.0000f);
    }

    setFlag(Blending, false);
}

QSGVideoMaterial_YUV::~QSGVideoMaterial_YUV()
{
    if (!m_textureSize.isEmpty()) {
        if (QOpenGLContext *current = QOpenGLContext::currentContext())
            current->functions()->glDeleteTextures(m_planeCount, m_textureIds);
        else
            qWarning() << "QSGVideoMaterial_YUV: Cannot obtain GL context, unable to delete textures";
    }
}

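// Upload the current frame (if any) to one GL texture per plane and leave the
// planes bound on consecutive texture units, with the Y plane on unit 0.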
void QSGVideoMaterial_YUV::bind()
{
    QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
    QSurfaceFormat::OpenGLContextProfile profile = QOpenGLContext::currentContext()->format().profile();

    QMutexLocker lock(&m_frameMutex);
    if (m_frame.isValid()) {
        if (m_frame.map(QAbstractVideoBuffer::ReadOnly)) {
            int fw = m_frame.width();
            int fh = m_frame.height();

            // Frame has changed size, recreate textures...
            if (m_textureSize != m_frame.size()) {
                if (!m_textureSize.isEmpty())
                    functions->glDeleteTextures(m_planeCount, m_textureIds);
                functions->glGenTextures(m_planeCount, m_textureIds);
                m_textureSize = m_frame.size();
            }

            GLint previousAlignment;
            const GLenum texFormat1 = (profile == QSurfaceFormat::CoreProfile) ? GL_RED : GL_LUMINANCE;
            const GLenum texFormat2 = (profile == QSurfaceFormat::CoreProfile) ? GL_RG : GL_LUMINANCE_ALPHA;

            functions->glGetIntegerv(GL_UNPACK_ALIGNMENT, &previousAlignment);
            functions->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

            if (m_format.pixelFormat() == QVideoFrame::Format_UYVY
                    || m_format.pixelFormat() == QVideoFrame::Format_YUYV) {
                int fw = m_frame.width();

                m_planeWidth[0] = fw;
                // In packed YUYV/UYVY data one U,V pair is shared by two pixels,
                // so the UV texture is half the image (Y) width.
                m_planeWidth[1] = fw / 2;
                functions->glActiveTexture(GL_TEXTURE1);
                // Either the r,b (YUYV) or the g,a (UYVY) channels are used as the source of UV.
                // U and V repeat every two pixels, hence only half of the image width is used.
                // Interpreting the data this way in the shaders avoids copying the frame and
                // branching in the fragment shader; only the interpretation of the data changes.
                bindTexture(m_textureIds[1], m_planeWidth[1], m_frame.height(), m_frame.bits(), GL_RGBA);
                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
                // Either the red (YUYV) or the alpha (UYVY) channel is used as the source of Y.
                bindTexture(m_textureIds[0], m_planeWidth[0], m_frame.height(), m_frame.bits(), texFormat2);
            } else if (m_format.pixelFormat() == QVideoFrame::Format_NV12
                       || m_format.pixelFormat() == QVideoFrame::Format_NV21) {
                const int y = 0;
                const int uv = 1;

                m_planeWidth[0] = m_planeWidth[1] = qreal(fw) / m_frame.bytesPerLine(y);

                functions->glActiveTexture(GL_TEXTURE1);
                bindTexture(m_textureIds[1], m_frame.bytesPerLine(uv) / 2, fh / 2, m_frame.bits(uv), texFormat2);
                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
                bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), texFormat1);

            } else { // YUV420P || YV12 || YUV422P
                const int y = 0;
                const int u = m_frame.pixelFormat() == QVideoFrame::Format_YV12 ? 2 : 1;
                const int v = m_frame.pixelFormat() == QVideoFrame::Format_YV12 ? 1 : 2;

                m_planeWidth[0] = qreal(fw) / m_frame.bytesPerLine(y);
                m_planeWidth[1] = m_planeWidth[2] = qreal(fw) / (2 * m_frame.bytesPerLine(u));

                const int uvHeight = m_frame.pixelFormat() == QVideoFrame::Format_YUV422P ? fh : fh / 2;

                functions->glActiveTexture(GL_TEXTURE1);
                bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), uvHeight, m_frame.bits(u), texFormat1);
                functions->glActiveTexture(GL_TEXTURE2);
                bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), uvHeight, m_frame.bits(v), texFormat1);
                functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
                bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), texFormat1);
            }

            functions->glPixelStorei(GL_UNPACK_ALIGNMENT, previousAlignment);
            m_frame.unmap();
        }

        m_frame = QVideoFrame();
    } else {
        // Go backwards to finish with GL_TEXTURE0
        for (int i = m_planeCount - 1; i >= 0; --i) {
            functions->glActiveTexture(GL_TEXTURE0 + i);
            functions->glBindTexture(GL_TEXTURE_2D, m_textureIds[i]);
        }
    }
}

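// Bind one texture id on the active texture unit, upload the plane data into
// it, and set linear filtering and edge clamping.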
void QSGVideoMaterial_YUV::bindTexture(int id, int w, int h, const uchar *bits, GLenum format)
{
    QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
    functions->glBindTexture(GL_TEXTURE_2D, id);
    functions->glTexImage2D(GL_TEXTURE_2D, 0, format, w, h, 0, format, GL_UNSIGNED_BYTE, bits);
    // Replacement for GL_LUMINANCE_ALPHA in core profile: swizzle GL_RG so that
    // .r/.g/.b read the first channel and .a reads the second, as the shaders expect.
    if (format == GL_RG) {
        functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_R, GL_RED);
        functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_G, GL_RED);
        functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_B, GL_RED);
        functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_A, GL_GREEN);
    }
    functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    functions->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}

QSGVideoNode_YUV::QSGVideoNode_YUV(const QVideoSurfaceFormat &format) :
    m_format(format)
{
    setFlag(QSGNode::OwnsMaterial);
    m_material = new QSGVideoMaterial_YUV(format);
    setMaterial(m_material);
}

QSGVideoNode_YUV::~QSGVideoNode_YUV()
{
}

void QSGVideoNode_YUV::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
    m_material->setCurrentFrame(frame);
    markDirty(DirtyMaterial);
}

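// Push the material state to the shader program: the sampler uniforms are bound
// to fixed texture units (plane 1 -> unit 0, plane 2 -> unit 1), the frame data
// is (re)uploaded via bind(), and the per-frame uniforms are updated.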
void QSGVideoMaterialShader_YUV_BiPlanar::updateState(const RenderState &state,
                                                      QSGMaterial *newMaterial,
                                                      QSGMaterial *oldMaterial)
{
    Q_UNUSED(oldMaterial);

    QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
    program()->setUniformValue(m_id_plane1Texture, 0);
    program()->setUniformValue(m_id_plane2Texture, 1);

    mat->bind();

    program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);
    program()->setUniformValue(m_id_plane1Width, mat->m_planeWidth[0]);
    program()->setUniformValue(m_id_plane2Width, mat->m_planeWidth[1]);
    if (state.isOpacityDirty()) {
        mat->m_opacity = state.opacity();
        program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
    }
    if (state.isMatrixDirty())
        program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}

void QSGVideoMaterialShader_YUV_TriPlanar::updateState(const RenderState &state,
                                                       QSGMaterial *newMaterial,
                                                       QSGMaterial *oldMaterial)
{
    QSGVideoMaterialShader_YUV_BiPlanar::updateState(state, newMaterial, oldMaterial);

    QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
    program()->setUniformValue(m_id_plane3Texture, 2);
    program()->setUniformValue(m_id_plane3Width, mat->m_planeWidth[2]);
}

void QSGVideoMaterialShader_UYVY::updateState(const RenderState &state,
                                              QSGMaterial *newMaterial,
                                              QSGMaterial *oldMaterial)
{
    Q_UNUSED(oldMaterial);

    QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
    program()->setUniformValue(m_id_yTexture, 0);
    program()->setUniformValue(m_id_uvTexture, 1);

    mat->bind();

    program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);

    if (state.isOpacityDirty()) {
        mat->m_opacity = state.opacity();
        program()->setUniformValue(m_id_opacity, GLfloat(mat->m_opacity));
    }

    if (state.isMatrixDirty())
        program()->setUniformValue(m_id_matrix, state.combinedMatrix());
}

QT_END_NAMESPACE