1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "qabstractvideobuffer.h"
5
6#include "qvideotexturehelper_p.h"
7#include "qvideoframeconverter_p.h"
8#include "qvideoframe_p.h"
9#include "qvideoframetexturefromsource_p.h"
10#include "private/qmultimediautils_p.h"
11
12#include <QtCore/qfile.h>
13#include <qpainter.h>
14#include <qloggingcategory.h>
15
16QT_BEGIN_NAMESPACE
17
18namespace QVideoTextureHelper
19{
20
// Per-pixel-format upload/layout description, indexed directly by
// QVideoFrameFormat::PixelFormat (see textureDescription()).
// Fields per entry:
//   nplanes       - number of texture planes uploaded for this format
//   strideFactor  - multiplier relating frame width to the plane-0 byte
//                   stride (presumably bytes per texel / macropixel of
//                   plane 0 — TODO confirm in qvideotexturehelper_p.h)
//   bytesRequired(stride, height) - total byte size of a frame buffer
//   textureFormat - logical per-plane formats, resolved to concrete
//                   QRhiTexture formats by TextureDescription::rhiTextureFormat()
//   sizeScale     - per-plane divisors applied to the frame size (e.g. {2,2}
//                   for 4:2:0 chroma planes)
static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] = {
    // Format_Invalid
    { 0, 0,
      [](int, int) { return 0; },
      { TextureDescription::UnknownFormat, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat},
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ARGB8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ARGB8888_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_XRGB8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRA8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::BGRA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRA8888_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::BGRA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRX8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::BGRA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ABGR8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_XBGR8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_RGBA8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_RGBX8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_AYUV
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_AYUV_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUV420P
    { 3, 1,
      // Y plane plus two half-size chroma planes; height rounded up to even.
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::Red_8 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_YUV422P
    { 3, 1,
      [](int stride, int height) { return stride * height * 2; },
      { TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::Red_8 },
      { { 1, 1 }, { 2, 1 }, { 2, 1 } }
    },
    // Format_YV12
    { 3, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::Red_8 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_UYVY
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 2, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUYV
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 2, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_NV12
    { 2, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { TextureDescription::Red_8, TextureDescription::RG_8, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_NV21
    { 2, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { TextureDescription::Red_8, TextureDescription::RG_8, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_IMC1
    { 3, 1,
      [](int stride, int height) {
          // IMC1 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      { TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::Red_8 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_IMC2
    { 2, 1,
      [](int stride, int height) { return 2*stride*height; },
      { TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 2 }, { 1, 1 } }
    },
    // Format_IMC3
    { 3, 1,
      [](int stride, int height) {
          // IMC3 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      { TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::Red_8 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_IMC4
    { 2, 1,
      [](int stride, int height) { return 2*stride*height; },
      { TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 2 }, { 1, 1 } }
    },
    // Format_Y8
    { 1, 1,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::Red_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_Y16
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::Red_16, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_P010
    { 2, 2,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { TextureDescription::Red_16, TextureDescription::RG_16, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_P016
    { 2, 2,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { TextureDescription::Red_16, TextureDescription::RG_16, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_SamplerExternalOES
    {
      // External textures are allocated by the platform; no CPU-side bytes.
      1, 0,
      [](int, int) { return 0; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_Jpeg
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_SamplerRect
    {
      // Rectangle textures are allocated by the platform; no CPU-side bytes.
      1, 0,
      [](int, int) { return 0; },
      { TextureDescription::BGRA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUV420P10
    { 3, 2,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { TextureDescription::Red_16, TextureDescription::Red_16, TextureDescription::Red_16 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
};
227
// Test-only hook: formats listed here are reported as unsupported by
// isRhiTextureFormatSupported() regardless of the actual QRhi backend.
Q_GLOBAL_STATIC(QList<QRhiTexture::Format>, g_excludedRhiTextureFormats) // for tests only
229
230static bool isRhiTextureFormatSupported(const QRhi *rhi, QRhiTexture::Format format)
231{
232 if (g_excludedRhiTextureFormats->contains(t: format))
233 return false;
234 if (!rhi) // consider the format is supported if no rhi specified
235 return true;
236 return rhi->isTextureFormatSupported(format);
237}
238
239static QRhiTexture::Format
240resolveRhiTextureFormat(QRhi *rhi, QRhiTexture::Format format,
241 QRhiTexture::Format fallback = QRhiTexture::UnknownFormat)
242{
243 if (isRhiTextureFormatSupported(rhi, format))
244 return format;
245
246 if (fallback != QRhiTexture::UnknownFormat && isRhiTextureFormatSupported(rhi, format: fallback))
247 return fallback;
248
249 qWarning() << "Cannot determine any usable texture format, using preferred format" << format;
250 return format;
251}
252
// Resolves the logical per-plane format of this description to a concrete
// QRhiTexture::Format for the given rhi, applying fallbacks for formats the
// backend may lack (e.g. R8 -> RED_OR_ALPHA8, RG8 -> RGBA8).
QRhiTexture::Format TextureDescription::rhiTextureFormat(int plane, QRhi *rhi) const
{
    switch (textureFormat[plane]) {
    case UnknownFormat:
        return QRhiTexture::UnknownFormat;
    case Red_8:
        // NOTE: RED_OR_ALPHA8 requires special alpha shaders if rhi doesn't have feature
        // RedOrAlpha8IsRed
        return resolveRhiTextureFormat(rhi, QRhiTexture::R8, QRhiTexture::RED_OR_ALPHA8);
    case RG_8:
        return resolveRhiTextureFormat(rhi, QRhiTexture::RG8, QRhiTexture::RGBA8);
    case RGBA_8:
        return resolveRhiTextureFormat(rhi, QRhiTexture::RGBA8);
    case BGRA_8:
        return resolveRhiTextureFormat(rhi, QRhiTexture::BGRA8);
    case Red_16:
        // TODO: Special handling for 16-bit formats, if we want to support them at all.
        // Otherwise should give an error.
        return resolveRhiTextureFormat(rhi, QRhiTexture::R16, QRhiTexture::RG8);
    case RG_16:
        return resolveRhiTextureFormat(rhi, QRhiTexture::RG16, QRhiTexture::RGBA8);
    default:
        Q_UNREACHABLE();
    }
}
278
279void setExcludedRhiTextureFormats(QList<QRhiTexture::Format> formats)
280{
281 g_excludedRhiTextureFormats->swap(other&: formats);
282}
283
284const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format)
285{
286 return descriptions + format;
287}
288
// Returns the .qsb vertex shader resource for the given format. The special
// sampler formats (external OES, rectangle) need dedicated vertex shaders;
// everything else shares the default one.
QString vertexShaderFileName(const QVideoFrameFormat &format)
{
    auto fmt = format.pixelFormat();
    Q_UNUSED(fmt);

// NOTE(review): '#if 1//def' deliberately enables these branches on all
// platforms, not just Android/macOS — confirm this is still intended.
#if 1//def Q_OS_ANDROID
    if (fmt == QVideoFrameFormat::Format_SamplerExternalOES)
        return QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb");
#endif
#if 1//def Q_OS_MACOS
    if (fmt == QVideoFrameFormat::Format_SamplerRect)
        return QStringLiteral(":/qt-project.org/multimedia/shaders/rectsampler.vert.qsb");
#endif

    return QStringLiteral(":/qt-project.org/multimedia/shaders/vertex.vert.qsb");
}
305
// Maps a pixel format (plus its color transfer and the target swapchain
// format) to the fragment shader resource that samples and converts it.
// Returns an empty QString when no shader applies (Format_Invalid).
QString fragmentShaderFileName(const QVideoFrameFormat &format, QRhi *,
                               QRhiSwapChain::Format surfaceFormat)
{
    QString shaderFile;
    switch (format.pixelFormat()) {
    case QVideoFrameFormat::Format_Y8:
        shaderFile = QStringLiteral("y");
        break;
    case QVideoFrameFormat::Format_Y16:
        shaderFile = QStringLiteral("y16");
        break;
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        shaderFile = QStringLiteral("ayuv");
        break;
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        shaderFile = QStringLiteral("argb");
        break;
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
        shaderFile = QStringLiteral("abgr");
        break;
    case QVideoFrameFormat::Format_Jpeg: // Jpeg is decoded transparently into an ARGB texture
        shaderFile = QStringLiteral("bgra");
        break;
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_RGBX8888:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        shaderFile = QStringLiteral("rgba");
        break;
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_IMC3:
        shaderFile = QStringLiteral("yuv_triplanar");
        break;
    case QVideoFrameFormat::Format_YUV420P10:
        shaderFile = QStringLiteral("yuv_triplanar_p10");
        break;
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
        shaderFile = QStringLiteral("yvu_triplanar");
        break;
    case QVideoFrameFormat::Format_IMC2:
        shaderFile = QStringLiteral("imc2");
        break;
    case QVideoFrameFormat::Format_IMC4:
        shaderFile = QStringLiteral("imc4");
        break;
    case QVideoFrameFormat::Format_UYVY:
        shaderFile = QStringLiteral("uyvy");
        break;
    case QVideoFrameFormat::Format_YUYV:
        shaderFile = QStringLiteral("yuyv");
        break;
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
        // P010/P016 have the same layout as NV12, just 16 instead of 8 bits per pixel
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_ST2084) {
            shaderFile = QStringLiteral("nv12_bt2020_pq");
            break;
        }
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_STD_B67) {
            shaderFile = QStringLiteral("nv12_bt2020_hlg");
            break;
        }
        shaderFile = QStringLiteral("p016");
        break;
    case QVideoFrameFormat::Format_NV12:
        shaderFile = QStringLiteral("nv12");
        break;
    case QVideoFrameFormat::Format_NV21:
        shaderFile = QStringLiteral("nv21");
        break;
    case QVideoFrameFormat::Format_SamplerExternalOES:
// '#if 1//def' keeps the branch enabled on all platforms; if it were compiled
// out, control would fall through to the SamplerRect case below.
#if 1//def Q_OS_ANDROID
        shaderFile = QStringLiteral("externalsampler");
        break;
#endif
    case QVideoFrameFormat::Format_SamplerRect:
#if 1//def Q_OS_MACOS
        shaderFile = QStringLiteral("rectsampler_bgra");
        break;
#endif
        // fallthrough
    case QVideoFrameFormat::Format_Invalid:
    default:
        break;
    }

    if (shaderFile.isEmpty())
        return QString();

    shaderFile.prepend(u":/qt-project.org/multimedia/shaders/");

    // HDR linear swapchains use the "_linear" shader variants, which skip the
    // transfer-function encoding of the output.
    if (surfaceFormat == QRhiSwapChain::HDRExtendedSrgbLinear)
        shaderFile.append(u"_linear");

    shaderFile.append(u".frag.qsb");

    Q_ASSERT_X(QFile::exists(shaderFile), Q_FUNC_INFO,
               QStringLiteral("Shader file %1 does not exist").arg(shaderFile).toLatin1());
    return shaderFile;
}
413
414// Matrices are calculated from
415// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.601-7-201103-I!!PDF-E.pdf
416// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.709-6-201506-I!!PDF-E.pdf
417// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf
418//
419// For BT2020, we also need to convert the Rec2020 RGB colorspace to sRGB see
420// shaders/colorconvert.glsl for details.
421//
422// Doing the math gives the following (Y, U & V normalized to [0..1] range):
423//
424// Y = a*R + b*G + c*B
425// R = Y + e*V
426// G = Y - c*d/b*U - a*e/b*V
427// B = Y + d*U
428
429// BT2020:
430// a = .2627, b = 0.6780, c = 0.0593
431// d = 1.8814
432// e = 1.4746
433//
434// BT709:
435// a = 0.2126, b = 0.7152, c = 0.0722
436// d = 1.8556
437// e = 1.5748
438//
// BT601:
// a = 0.299, b = 0.587, c = 0.114
// d = 1.42
// e = 1.772
// NOTE(review): standard BT.601 gives d = 1.772 (U term of B) and
// e = 1.402 (V term of R); the d/e values above (and the full-range BT601
// matrix in colorMatrix() below) appear swapped — verify against
// R-REC-BT.601-7. (b was corrected from 0.578 to 0.587: a+b+c must sum to 1.)
443//
444
// clang-format off
// Builds the 4x4 YUV -> RGB conversion matrix for the frame's color space and
// range. The input vector is (Y, U, V, 1); rows produce R, G, B and the
// constant 1, with the last column holding the range/bias offsets.
static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
{
    auto colorSpace = format.colorSpace();
    // Undefined color space: guess from the resolution.
    if (colorSpace == QVideoFrameFormat::ColorSpace_Undefined) {
        if (format.frameHeight() > 576)
            // HD video, assume BT709
            colorSpace = QVideoFrameFormat::ColorSpace_BT709;
        else
            // SD video, assume BT601
            colorSpace = QVideoFrameFormat::ColorSpace_BT601;
    }
    switch (colorSpace) {
    case QVideoFrameFormat::ColorSpace_AdobeRgb:
        return {
            1.0f,  0.000f,  1.402f, -0.701f,
            1.0f, -0.344f, -0.714f,  0.529f,
            1.0f,  1.772f,  0.000f, -0.886f,
            0.0f,  0.000f,  0.000f,  1.000f
        };
    default:
    case QVideoFrameFormat::ColorSpace_BT709:
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.0f,  0.0f,       1.5748f,   -0.790488f,
                1.0f, -0.187324f, -0.468124f,  0.329010f,
                1.0f,  1.855600f,  0.0f,      -0.931439f,
                0.0f,  0.0f,       0.0f,       1.0f
            };
        // Limited (video) range: 16..235 luma, 16..240 chroma.
        return {
            1.1644f,  0.0000f,  1.7927f, -0.9729f,
            1.1644f, -0.2132f, -0.5329f,  0.3015f,
            1.1644f,  2.1124f,  0.0000f, -1.1334f,
            0.0000f,  0.0000f,  0.0000f,  1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT2020:
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.0000f,  1.4746f, -0.7402f,
                1.f, -0.1646f, -0.5714f,  0.3694f,
                1.f,  1.8814f,  0.000f,  -0.9445f,
                0.0f, 0.0000f,  0.000f,   1.0000f
            };
        return {
            1.1644f,  0.000f,   1.6787f, -0.9157f,
            1.1644f, -0.1874f, -0.6504f,  0.3475f,
            1.1644f,  2.1418f,  0.0000f, -1.1483f,
            0.0000f,  0.0000f,  0.0000f,  1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT601:
        // Corresponds to the primaries used by NTSC BT601. For PAL BT601, we use the BT709 conversion
        // as those are very close.
        // NOTE(review): in this full-range matrix the R row uses 1.772 (V) and
        // the B row 1.42 (U), while standard BT.601 — and the limited-range
        // matrix below — put ~1.402/1.596 on R/V and ~1.772/2.017 on B/U.
        // Looks swapped; confirm against R-REC-BT.601-7.
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.000f,   1.772f,   -0.886f,
                1.f, -0.1646f, -0.57135f,  0.36795f,
                1.f,  1.42f,    0.000f,   -0.71f,
                0.0f, 0.000f,   0.000f,    1.0000f
            };
        return {
            1.164f,  0.000f,  1.596f, -0.8708f,
            1.164f, -0.392f, -0.813f,  0.5296f,
            1.164f,  2.017f,  0.000f, -1.0810f,
            0.000f,  0.000f,  0.000f,  1.0000f
        };
    }
}
512// clang-format on
513
// PQ transfer function, see also https://en.wikipedia.org/wiki/Perceptual_quantizer
// or https://ieeexplore.ieee.org/document/7291452
// Encodes a linear value (1.0 == SDR reference white, 100 nits) into the
// non-linear PQ signal range [0..1].
static float convertPQFromLinear(float sig)
{
    // SMPTE ST 2084 constants in their exact rational form.
    constexpr float m1 = 1305.f/8192.f;
    constexpr float m2 = 2523.f/32.f;
    constexpr float c1 = 107.f/128.f;
    constexpr float c2 = 2413.f/128.f;
    constexpr float c3 = 2392.f/128.f;

    // Scale so that linear 1.0 corresponds to 100 nits within PQ's
    // 10000 nit encoding range.
    constexpr float SDR_LEVEL = 100.f;
    sig *= SDR_LEVEL/10000.f;

    const float psig = powf(sig, m1);
    const float num = c1 + c2*psig;
    const float den = 1 + c3*psig;
    return powf(num/den, m2);
}
531
// Hybrid log-gamma OETF (ITU-R BT.2100): encodes a linear scene-light value
// into the non-linear HLG signal. Square-root segment below 1/12, logarithmic
// segment above; the two meet continuously at sig == 1/12 (value 0.5).
float convertHLGFromLinear(float sig)
{
    constexpr float a = 0.17883277f;
    constexpr float b = 0.28466892f; // = 1 - 4a
    constexpr float c = 0.55991073f; // = 0.5 - a ln(4a)

    return sig < 1.f/12.f ? sqrtf(3.f*sig)
                          : a*logf(12.f*sig - b) + c;
}
542
// Identity transfer function used for SDR content: linear values are passed
// to the shader unchanged. Exists so updateUniformData() can select a
// transfer function uniformly via a function pointer.
static float convertSDRFromLinear(float sig)
{
    return sig;
}
547
548void updateUniformData(QByteArray *dst, QRhi *rhi, const QVideoFrameFormat &format,
549 const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity,
550 float maxNits)
551{
552#ifndef Q_OS_ANDROID
553 Q_UNUSED(frame);
554#endif
555
556 QMatrix4x4 cmat;
557 switch (format.pixelFormat()) {
558 case QVideoFrameFormat::Format_Invalid:
559 return;
560
561 case QVideoFrameFormat::Format_Jpeg:
562 case QVideoFrameFormat::Format_ARGB8888:
563 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
564 case QVideoFrameFormat::Format_XRGB8888:
565 case QVideoFrameFormat::Format_BGRA8888:
566 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
567 case QVideoFrameFormat::Format_BGRX8888:
568 case QVideoFrameFormat::Format_ABGR8888:
569 case QVideoFrameFormat::Format_XBGR8888:
570 case QVideoFrameFormat::Format_RGBA8888:
571 case QVideoFrameFormat::Format_RGBX8888:
572
573 case QVideoFrameFormat::Format_Y8:
574 case QVideoFrameFormat::Format_Y16:
575 break;
576 case QVideoFrameFormat::Format_IMC1:
577 case QVideoFrameFormat::Format_IMC2:
578 case QVideoFrameFormat::Format_IMC3:
579 case QVideoFrameFormat::Format_IMC4:
580 case QVideoFrameFormat::Format_AYUV:
581 case QVideoFrameFormat::Format_AYUV_Premultiplied:
582 case QVideoFrameFormat::Format_YUV420P:
583 case QVideoFrameFormat::Format_YUV420P10:
584 case QVideoFrameFormat::Format_YUV422P:
585 case QVideoFrameFormat::Format_YV12:
586 case QVideoFrameFormat::Format_UYVY:
587 case QVideoFrameFormat::Format_YUYV:
588 case QVideoFrameFormat::Format_NV12:
589 case QVideoFrameFormat::Format_NV21:
590 case QVideoFrameFormat::Format_P010:
591 case QVideoFrameFormat::Format_P016:
592 cmat = colorMatrix(format);
593 break;
594 case QVideoFrameFormat::Format_SamplerExternalOES:
595 // get Android specific transform for the externalsampler texture
596 if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
597 cmat = hwBuffer->externalTextureMatrix();
598 break;
599 case QVideoFrameFormat::Format_SamplerRect:
600 {
601 // Similarly to SamplerExternalOES, the "color matrix" is used here to
602 // transform the texture coordinates. OpenGL texture rectangles expect
603 // non-normalized UVs, so apply a scale to have the fragment shader see
604 // UVs in range [width,height] instead of [0,1].
605 const QSize videoSize = frame.size();
606 cmat.scale(x: videoSize.width(), y: videoSize.height());
607 }
608 break;
609 }
610
611 // HDR with a PQ or HLG transfer function uses a BT2390 based tone mapping to cut off the HDR peaks
612 // This requires that we pass the max luminance the tonemapper should clip to over to the fragment
613 // shader. To reduce computations there, it's precomputed in PQ values here.
614 auto fromLinear = convertSDRFromLinear;
615 switch (format.colorTransfer()) {
616 case QVideoFrameFormat::ColorTransfer_ST2084:
617 fromLinear = convertPQFromLinear;
618 break;
619 case QVideoFrameFormat::ColorTransfer_STD_B67:
620 fromLinear = convertHLGFromLinear;
621 break;
622 default:
623 break;
624 }
625
626 if (dst->size() < qsizetype(sizeof(UniformData)))
627 dst->resize(size: sizeof(UniformData));
628
629 auto ud = reinterpret_cast<UniformData*>(dst->data());
630 memcpy(dest: ud->transformMatrix, src: transform.constData(), n: sizeof(ud->transformMatrix));
631 memcpy(dest: ud->colorMatrix, src: cmat.constData(), n: sizeof(ud->transformMatrix));
632 ud->opacity = opacity;
633 ud->width = float(format.frameWidth());
634 ud->masteringWhite = fromLinear(float(format.maxLuminance())/100.f);
635 ud->maxLum = fromLinear(float(maxNits)/100.f);
636 const TextureDescription* desc = textureDescription(format: format.pixelFormat());
637
638 // Let's consider using the red component if Red_8 is not used,
639 // it's useful for compatibility the shaders with 16bit formats.
640
641 const bool useRedComponent =
642 !desc->hasTextureFormat(format: TextureDescription::Red_8) ||
643 isRhiTextureFormatSupported(rhi, format: QRhiTexture::R8) ||
644 rhi->isFeatureSupported(feature: QRhi::RedOrAlpha8IsRed);
645 ud->redOrAlphaIndex = useRedComponent ? 0 : 3; // r:0 g:1 b:2 a:3
646 for (int plane = 0; plane < desc->nplanes; ++plane)
647 ud->planeFormats[plane] = desc->rhiTextureFormat(plane, rhi);
648}
649
// Outcome of updateTextureWithMap(): distinguishes whether the upload batch
// holds a copy of the mapped data (the frame may be unmapped immediately) or
// references it directly (the frame must stay mapped until the batch is
// submitted).
enum class UpdateTextureWithMapResult : uint8_t {
    Failed,
    UpdatedWithDataCopy,
    UpdatedWithDataReference
};
655
// Uploads one plane of a CPU-mapped frame into 'tex', (re)creating the texture
// when its size or format no longer matches. Jpeg frames are decoded to an
// ARGB32 QImage and uploaded by copy; all other formats enqueue a zero-copy
// reference to the mapped bytes (the caller must then keep the frame mapped
// until the resource update batch is submitted — see the return value).
static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi &rhi,
                                                       QRhiResourceUpdateBatch &rub, int plane,
                                                       std::unique_ptr<QRhiTexture> &tex)
{
    Q_ASSERT(frame.isMapped());

    QVideoFrameFormat fmt = frame.surfaceFormat();
    QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
    QSize size = fmt.frameSize();

    const TextureDescription &texDesc = descriptions[pixelFormat];
    QSize planeSize = texDesc.rhiPlaneSize(size, plane, &rhi);

    // Evaluate before the reset below: a freshly allocated texture also needs
    // create() to be called, which happens in the needsRebuild branch.
    bool needsRebuild = !tex || tex->pixelSize() != planeSize || tex->format() != texDesc.rhiTextureFormat(plane, &rhi);
    if (!tex) {
        tex.reset(rhi.newTexture(texDesc.rhiTextureFormat(plane, &rhi), planeSize, 1, {}));
        if (!tex) {
            qWarning("Failed to create new texture (size %dx%d)", planeSize.width(), planeSize.height());
            return UpdateTextureWithMapResult::Failed;
        }
    }

    if (needsRebuild) {
        tex->setFormat(texDesc.rhiTextureFormat(plane, &rhi));
        tex->setPixelSize(planeSize);
        if (!tex->create()) {
            qWarning("Failed to create texture (size %dx%d)", planeSize.width(), planeSize.height());
            return UpdateTextureWithMapResult::Failed;
        }
    }

    auto result = UpdateTextureWithMapResult::UpdatedWithDataCopy;

    QRhiTextureSubresourceUploadDescription subresDesc;

    if (pixelFormat == QVideoFrameFormat::Format_Jpeg) {
        Q_ASSERT(plane == 0);

        QImage image;

        // calling QVideoFrame::toImage is not accurate. To be fixed.
        // frame transformation will be considered later
        const QVideoFrameFormat surfaceFormat = frame.surfaceFormat();

        const bool hasSurfaceTransform = surfaceFormat.isMirrored()
                || surfaceFormat.scanLineDirection() == QVideoFrameFormat::BottomToTop
                || surfaceFormat.rotation() != QtVideo::Rotation::None;

        if (hasSurfaceTransform)
            image = qImageFromVideoFrame(frame, VideoTransformation{});
        else
            image = frame.toImage(); // use the frame cache, no surface transforms applied

        image.convertTo(QImage::Format_ARGB32);
        subresDesc.setImage(image);

    } else {
        // Note, QByteArray::fromRawData creates a QByteArray view without copying the data
        subresDesc.setData(QByteArray::fromRawData(
                reinterpret_cast<const char *>(frame.bits(plane)), frame.mappedBytes(plane)));
        subresDesc.setDataStride(frame.bytesPerLine(plane));
        result = UpdateTextureWithMapResult::UpdatedWithDataReference;
    }

    QRhiTextureUploadEntry entry(0, 0, subresDesc);
    QRhiTextureUploadDescription desc({ entry });
    rub.uploadTexture(tex.get(), desc);

    return result;
}
726
727static std::unique_ptr<QRhiTexture>
728createTextureFromHandle(QVideoFrameTexturesHandles &texturesSet, QRhi &rhi,
729 QVideoFrameFormat::PixelFormat pixelFormat, QSize size, int plane)
730{
731 const TextureDescription &texDesc = descriptions[pixelFormat];
732 QSize planeSize = texDesc.rhiPlaneSize(frameSize: size, plane, rhi: &rhi);
733
734 QRhiTexture::Flags textureFlags = {};
735 if (pixelFormat == QVideoFrameFormat::Format_SamplerExternalOES) {
736#ifdef Q_OS_ANDROID
737 if (rhi.backend() == QRhi::OpenGLES2)
738 textureFlags |= QRhiTexture::ExternalOES;
739#endif
740 }
741 if (pixelFormat == QVideoFrameFormat::Format_SamplerRect) {
742#ifdef Q_OS_MACOS
743 if (rhi.backend() == QRhi::OpenGLES2)
744 textureFlags |= QRhiTexture::TextureRectangleGL;
745#endif
746 }
747
748 if (quint64 handle = texturesSet.textureHandle(rhi, plane); handle) {
749 std::unique_ptr<QRhiTexture> tex(rhi.newTexture(format: texDesc.rhiTextureFormat(plane, rhi: &rhi), pixelSize: planeSize, sampleCount: 1, flags: textureFlags));
750 if (tex->createFrom(src: {.object: handle, .layout: 0}))
751 return tex;
752
753 qWarning(msg: "Failed to initialize QRhiTexture wrapper for native texture object %llu",handle);
754 }
755 return {};
756}
757
// Creates one QRhiTexture per plane from the native handles in 'texturesSet'
// and wraps the array in a TexturesType instance; any extra arguments are
// forwarded to the TexturesType constructor. Returns null if any single plane
// fails to produce a texture.
template <typename TexturesType, typename... Args>
static QVideoFrameTexturesUPtr
createTexturesArray(QRhi &rhi, QVideoFrameTexturesHandles &texturesSet,
                    QVideoFrameFormat::PixelFormat pixelFormat, QSize size, Args &&...args)
{
    const TextureDescription &texDesc = descriptions[pixelFormat];
    bool ok = true;
    RhiTextureArray textures;
    for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
        textures[plane] = QVideoTextureHelper::createTextureFromHandle(texturesSet, rhi,
                                                                       pixelFormat, size, plane);
        ok &= bool(textures[plane]);
    }
    if (ok)
        return std::make_unique<TexturesType>(std::move(textures), std::forward<Args>(args)...);
    else
        return {};
}
776
777QVideoFrameTexturesUPtr createTexturesFromHandles(QVideoFrameTexturesHandlesUPtr texturesSet,
778 QRhi &rhi,
779 QVideoFrameFormat::PixelFormat pixelFormat,
780 QSize size)
781{
782 if (!texturesSet)
783 return nullptr;
784
785 if (pixelFormat == QVideoFrameFormat::Format_Invalid)
786 return nullptr;
787
788 if (size.isEmpty())
789 return nullptr;
790
791 auto &texturesSetRef = *texturesSet;
792 return createTexturesArray<QVideoFrameTexturesFromHandlesSet>(rhi, texturesSet&: texturesSetRef, pixelFormat,
793 size, args: std::move(texturesSet));
794}
795
// Uploads a CPU-accessible frame into per-plane textures. If 'oldTextures' is
// a QVideoFrameTexturesFromMemory instance, its textures are reused to avoid
// reallocations. When any plane upload merely references the mapped bytes,
// the (still mapped) frame is stored inside the result so the data stays
// valid until the upload batch is submitted.
static QVideoFrameTexturesUPtr createTexturesFromMemory(QVideoFrame frame, QRhi &rhi,
                                                        QRhiResourceUpdateBatch &rub,
                                                        QVideoFrameTexturesUPtr &oldTextures)
{
    if (!frame.map(QVideoFrame::ReadOnly)) {
        qWarning() << "Cannot map a video frame in ReadOnly mode!";
        return {};
    }

    auto unmapFrameGuard = qScopeGuard([&frame] { frame.unmap(); });

    const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];

    // Reuse the previous texture set only if it is the memory-backed kind.
    const bool canReuseTextures(dynamic_cast<QVideoFrameTexturesFromMemory*>(oldTextures.get()));

    std::unique_ptr<QVideoFrameTexturesFromMemory> textures(canReuseTextures ?
            static_cast<QVideoFrameTexturesFromMemory *>(oldTextures.release()) :
            new QVideoFrameTexturesFromMemory);

    RhiTextureArray& textureArray = textures->textureArray();
    bool shouldKeepMapping = false;
    for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
        const auto result = updateTextureWithMap(frame, rhi, rub, plane, textureArray[plane]);
        if (result == UpdateTextureWithMapResult::Failed)
            return {};

        // A zero-copy upload references the mapped bytes, so the mapping must
        // outlive this function.
        if (result == UpdateTextureWithMapResult::UpdatedWithDataReference)
            shouldKeepMapping = true;
    }

    // as QVideoFrame::unmap does nothing with null frames, we just move the frame to the result
    textures->setMappedFrame(shouldKeepMapping ? std::move(frame) : QVideoFrame());

    return textures;
}
831
832QVideoFrameTexturesUPtr createTextures(const QVideoFrame &frame, QRhi &rhi,
833 QRhiResourceUpdateBatch &rub,
834 QVideoFrameTexturesUPtr oldTextures)
835{
836 if (!frame.isValid())
837 return {};
838
839 auto setSourceFrame = [&frame](QVideoFrameTexturesUPtr result) {
840 result->setSourceFrame(frame);
841 return result;
842 };
843
844 if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
845 if (auto textures = hwBuffer->mapTextures(rhi, oldTextures))
846 return setSourceFrame(std::move(textures));
847
848 QVideoFrameFormat format = frame.surfaceFormat();
849 if (auto textures = createTexturesArray<QVideoFrameTexturesFromRhiTextureArray>(
850 rhi, texturesSet&: *hwBuffer, pixelFormat: format.pixelFormat(), size: format.frameSize()))
851 return setSourceFrame(std::move(textures));
852 }
853
854 return setSourceFrame(createTexturesFromMemory(frame, rhi, rub, oldTextures));
855}
856
857bool SubtitleLayout::update(const QSize &frameSize, QString text)
858{
859 text.replace(before: QLatin1Char('\n'), after: QChar::LineSeparator);
860 if (layout.text() == text && videoSize == frameSize)
861 return false;
862
863 videoSize = frameSize;
864 QFont font;
865 // 0.045 - based on this https://www.md-subs.com/saa-subtitle-font-size
866 qreal fontSize = frameSize.height() * 0.045;
867 font.setPointSize(fontSize);
868
869 layout.setText(text);
870 if (text.isEmpty()) {
871 bounds = {};
872 return true;
873 }
874 layout.setFont(font);
875 QTextOption option;
876 option.setUseDesignMetrics(true);
877 option.setAlignment(Qt::AlignCenter);
878 layout.setTextOption(option);
879
880 QFontMetrics metrics(font);
881 int leading = metrics.leading();
882
883 qreal lineWidth = videoSize.width()*.9;
884 qreal margin = videoSize.width()*.05;
885 qreal height = 0;
886 qreal textWidth = 0;
887 layout.beginLayout();
888 while (1) {
889 QTextLine line = layout.createLine();
890 if (!line.isValid())
891 break;
892
893 line.setLineWidth(lineWidth);
894 height += leading;
895 line.setPosition(QPointF(margin, height));
896 height += line.height();
897 textWidth = qMax(a: textWidth, b: line.naturalTextWidth());
898 }
899 layout.endLayout();
900
901 // put subtitles vertically in lower part of the video but not stuck to the bottom
902 int bottomMargin = videoSize.height() / 20;
903 qreal y = videoSize.height() - bottomMargin - height;
904 layout.setPosition(QPointF(0, y));
905 textWidth += fontSize/4.;
906
907 bounds = QRectF((videoSize.width() - textWidth)/2., y, textWidth, height);
908 return true;
909}
910
911void SubtitleLayout::draw(QPainter *painter, const QPointF &translate) const
912{
913 painter->save();
914 painter->translate(offset: translate);
915 painter->setCompositionMode(QPainter::CompositionMode_SourceOver);
916
917 QColor bgColor = Qt::black;
918 bgColor.setAlpha(128);
919 painter->setBrush(bgColor);
920 painter->setPen(Qt::NoPen);
921 painter->drawRect(rect: bounds);
922
923 QTextLayout::FormatRange range;
924 range.start = 0;
925 range.length = layout.text().size();
926 range.format.setForeground(Qt::white);
927 layout.draw(p: painter, pos: {}, selections: { range });
928 painter->restore();
929}
930
931QImage SubtitleLayout::toImage() const
932{
933 auto size = bounds.size().toSize();
934 if (size.isEmpty())
935 return QImage();
936 QImage img(size, QImage::Format_RGBA8888_Premultiplied);
937 QColor bgColor = Qt::black;
938 bgColor.setAlpha(128);
939 img.fill(color: bgColor);
940
941 QPainter painter(&img);
942 painter.translate(offset: -bounds.topLeft());
943 QTextLayout::FormatRange range;
944 range.start = 0;
945 range.length = layout.text().size();
946 range.format.setForeground(Qt::white);
947 layout.draw(p: &painter, pos: {}, selections: { range });
948 return img;
949}
950
951}
952
953QT_END_NAMESPACE
954

// source code of qtmultimedia/src/multimedia/video/qvideotexturehelper.cpp