1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "qabstractvideobuffer.h"
5
6#include "qvideotexturehelper_p.h"
7#include "qvideoframeconverter_p.h"
8#include "qvideoframe_p.h"
9#include "qvideoframetexturefromsource_p.h"
10#include "private/qmultimediautils_p.h"
11
12#include <QtCore/qfile.h>
13#include <qpainter.h>
14#include <qloggingcategory.h>
15
16QT_BEGIN_NAMESPACE
17
18Q_STATIC_LOGGING_CATEGORY(qLcVideoTextureHelper, "qt.multimedia.video.texturehelper")
19
20namespace QVideoTextureHelper
21{
22
// Texture layout table, indexed directly by QVideoFrameFormat::PixelFormat
// (one entry per enum value, in enum order — do not reorder or skip entries).
// Per entry:
//  - nplanes:       number of separate RHI texture planes used for the format
//  - strideFactor:  per-pixel byte multiplier for the first plane
//                   (0 for the sampler-only formats that are never uploaded
//                   from CPU memory) — NOTE(review): inferred from the values;
//                   confirm against the TextureDescription declaration
//  - bytesRequired: total frame buffer size in bytes, from plane-0 stride and
//                   frame height
//  - textureFormat: abstract per-plane format, mapped to a QRhiTexture::Format
//                   by TextureDescription::rhiTextureFormat()
//  - sizeScale:     per-plane divisors of the frame size (e.g. { 2, 2 } for
//                   4:2:0 chroma planes)
static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] = {
    // Format_Invalid
    { .nplanes: 0, .strideFactor: 0,
      .bytesRequired: [](int, int) { return 0; },
      .textureFormat: { TextureDescription::UnknownFormat, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat},
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_ARGB8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_ARGB8888_Premultiplied
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_XRGB8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_BGRA8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::BGRA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_BGRA8888_Premultiplied
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::BGRA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_BGRX8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::BGRA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_ABGR8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_XBGR8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_RGBA8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_RGBX8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_AYUV
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_AYUV_Premultiplied
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_YUV420P
    // bytesRequired: 1.5 lines worth of data per row, rounded up to an even line count.
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::Red_8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
    // Format_YUV422P
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * height * 2; },
      .textureFormat: {TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::Red_8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 1 }, { .x: 2, .y: 1 } }
    },
    // Format_YV12
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: {TextureDescription::Red_8, TextureDescription::Red_8, TextureDescription::Red_8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
    // Format_UYVY
    // Packed 4:2:2 — one RGBA_8 texel covers a 2-pixel macro-pixel, hence x-scale 2.
    { .nplanes: 1, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 2, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_YUYV
    { .nplanes: 1, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 2, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_NV12
    { .nplanes: 2, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { TextureDescription::Red_8, TextureDescription::RG_8, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_NV21
    { .nplanes: 2, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { TextureDescription::Red_8, TextureDescription::RG_8, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_IMC1
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) {
          // IMC1 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      .textureFormat: {TextureDescription::Red_8,TextureDescription::Red_8,TextureDescription::Red_8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
    // Format_IMC2
    // U and V lines are stored interleaved side by side at full stride, so the
    // combined chroma plane is full width and half height.
    { .nplanes: 2, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return 2*stride*height; },
      .textureFormat: {TextureDescription::Red_8,TextureDescription::Red_8, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_IMC3
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) {
          // IMC3 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      .textureFormat: {TextureDescription::Red_8,TextureDescription::Red_8,TextureDescription::Red_8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
    // Format_IMC4
    { .nplanes: 2, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return 2*stride*height; },
      .textureFormat: {TextureDescription::Red_8,TextureDescription::Red_8, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_Y8
    { .nplanes: 1, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: {TextureDescription::Red_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_Y16
    { .nplanes: 1, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::Red_16, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_P010
    { .nplanes: 2, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { TextureDescription::Red_16, TextureDescription::RG_16, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_P016
    { .nplanes: 2, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { TextureDescription::Red_16, TextureDescription::RG_16, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_SamplerExternalOES
    // Sampler-only format: no CPU upload, hence strideFactor 0 and zero bytesRequired.
    {
      .nplanes: 1, .strideFactor: 0,
      .bytesRequired: [](int, int) { return 0; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_Jpeg
    // Decoded to an RGBA texture before upload (see updateTextureWithMap()).
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { TextureDescription::RGBA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_SamplerRect
    {
      .nplanes: 1, .strideFactor: 0,
      .bytesRequired: [](int, int) { return 0; },
      .textureFormat: { TextureDescription::BGRA_8, TextureDescription::UnknownFormat, TextureDescription::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_YUV420P10
    { .nplanes: 3, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { TextureDescription::Red_16, TextureDescription::Red_16, TextureDescription::Red_16 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
};
229
// Texture formats that tests force to be reported as unsupported; consulted by
// isRhiTextureFormatSupported() and populated via setExcludedRhiTextureFormats().
Q_GLOBAL_STATIC(QList<QRhiTexture::Format>, g_excludedRhiTextureFormats) // for tests only
231
232static bool isRhiTextureFormatSupported(const QRhi *rhi, QRhiTexture::Format format)
233{
234 if (g_excludedRhiTextureFormats->contains(t: format))
235 return false;
236 if (!rhi) // consider the format is supported if no rhi specified
237 return true;
238 return rhi->isTextureFormatSupported(format);
239}
240
241QRhiTexture::Format TextureDescription::rhiTextureFormat(int plane, QRhi *rhi) const
242{
243 QRhiTexture::Format preferredFormat = QRhiTexture::UnknownFormat;
244
245 switch (textureFormat[plane]) {
246 case TextureDescription::Red_8:
247 preferredFormat = QRhiTexture::R8;
248 break;
249 case TextureDescription::Red_16:
250 preferredFormat = QRhiTexture::R16;
251 break;
252 case TextureDescription::RG_8:
253 preferredFormat = QRhiTexture::RG8;
254 break;
255 case TextureDescription::RG_16:
256 preferredFormat = QRhiTexture::RG16;
257 break;
258 case TextureDescription::RGBA_8:
259 preferredFormat = QRhiTexture::RGBA8;
260 break;
261 case TextureDescription::BGRA_8:
262 preferredFormat = QRhiTexture::BGRA8;
263 break;
264 case TextureDescription::UnknownFormat:
265 break;
266 default:
267 Q_UNREACHABLE();
268 }
269
270 return resolvedRhiTextureFormat(format: preferredFormat, rhi);
271}
272
273QRhiTexture::Format resolvedRhiTextureFormat(QRhiTexture::Format format, QRhi *rhi)
274{
275 if (isRhiTextureFormatSupported(rhi, format))
276 return format;
277
278 QRhiTexture::Format fallbackFormat;
279 switch (format) {
280 case QRhiTexture::R8:
281 fallbackFormat = resolvedRhiTextureFormat(format: QRhiTexture::RED_OR_ALPHA8, rhi);
282 break;
283 case QRhiTexture::RG8:
284 case QRhiTexture::RG16:
285 fallbackFormat = resolvedRhiTextureFormat(format: QRhiTexture::RGBA8, rhi);
286 break;
287 case QRhiTexture::R16:
288 fallbackFormat = resolvedRhiTextureFormat(format: QRhiTexture::RG8, rhi);
289 break;
290 default:
291 // End fallback chain here, and return UnknownFormat
292 return QRhiTexture::UnknownFormat;
293 }
294
295 if (fallbackFormat == QRhiTexture::UnknownFormat) {
296 // TODO: QTBUG-135911: In some cases rhi claims format and fallbacks are all
297 // unsupported, but when using preferred format video plays fine
298 qCDebug(qLcVideoTextureHelper) << "Cannot determine any usable texture format, using preferred format" << format;
299 return format;
300 }
301
302 qCDebug(qLcVideoTextureHelper) << "Using fallback texture format" << fallbackFormat;
303 return fallbackFormat;
304}
305
306void setExcludedRhiTextureFormats(QList<QRhiTexture::Format> formats)
307{
308 g_excludedRhiTextureFormats->swap(other&: formats);
309}
310
311const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format)
312{
313 return descriptions + format;
314}
315
316QString vertexShaderFileName(const QVideoFrameFormat &format)
317{
318 auto fmt = format.pixelFormat();
319 Q_UNUSED(fmt);
320
321#if 1//def Q_OS_ANDROID
322 if (fmt == QVideoFrameFormat::Format_SamplerExternalOES)
323 return QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb");
324#endif
325#if 1//def Q_OS_MACOS
326 if (fmt == QVideoFrameFormat::Format_SamplerRect)
327 return QStringLiteral(":/qt-project.org/multimedia/shaders/rectsampler.vert.qsb");
328#endif
329
330 return QStringLiteral(":/qt-project.org/multimedia/shaders/vertex.vert.qsb");
331}
332
// Returns the resource path of the fragment shader matching the frame's pixel
// format, its color transfer function (for P010/P016 HDR variants) and the
// swap chain surface format (a "_linear" variant is used for HDR surfaces).
// Returns an empty QString when no shader exists for the format.
QString fragmentShaderFileName(const QVideoFrameFormat &format, QRhi *,
                               QRhiSwapChain::Format surfaceFormat)
{
    QString shaderFile;
    switch (format.pixelFormat()) {
    case QVideoFrameFormat::Format_Y8:
        shaderFile = QStringLiteral("y");
        break;
    case QVideoFrameFormat::Format_Y16:
        shaderFile = QStringLiteral("y16");
        break;
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        shaderFile = QStringLiteral("ayuv");
        break;
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        shaderFile = QStringLiteral("argb");
        break;
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
        shaderFile = QStringLiteral("abgr");
        break;
    case QVideoFrameFormat::Format_Jpeg: // Jpeg is decoded transparently into an ARGB texture
        shaderFile = QStringLiteral("bgra");
        break;
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_RGBX8888:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        shaderFile = QStringLiteral("rgba");
        break;
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_IMC3:
        shaderFile = QStringLiteral("yuv_triplanar");
        break;
    case QVideoFrameFormat::Format_YUV420P10:
        shaderFile = QStringLiteral("yuv_triplanar_p10");
        break;
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
        shaderFile = QStringLiteral("yvu_triplanar");
        break;
    case QVideoFrameFormat::Format_IMC2:
        shaderFile = QStringLiteral("imc2");
        break;
    case QVideoFrameFormat::Format_IMC4:
        shaderFile = QStringLiteral("imc4");
        break;
    case QVideoFrameFormat::Format_UYVY:
        shaderFile = QStringLiteral("uyvy");
        break;
    case QVideoFrameFormat::Format_YUYV:
        shaderFile = QStringLiteral("yuyv");
        break;
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
        // P010/P016 have the same layout as NV12, just 16 instead of 8 bits per pixel
        // HDR transfer functions get dedicated tone-mapping shader variants.
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_ST2084) {
            shaderFile = QStringLiteral("nv12_bt2020_pq");
            break;
        }
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_STD_B67) {
            shaderFile = QStringLiteral("nv12_bt2020_hlg");
            break;
        }
        shaderFile = QStringLiteral("p016");
        break;
    case QVideoFrameFormat::Format_NV12:
        shaderFile = QStringLiteral("nv12");
        break;
    case QVideoFrameFormat::Format_NV21:
        shaderFile = QStringLiteral("nv21");
        break;
    // The "#if 1" blocks below used to be platform guards (Android/macOS); with
    // them disabled, these cases would fall through to the default.
    case QVideoFrameFormat::Format_SamplerExternalOES:
#if 1//def Q_OS_ANDROID
        shaderFile = QStringLiteral("externalsampler");
        break;
#endif
    case QVideoFrameFormat::Format_SamplerRect:
#if 1//def Q_OS_MACOS
        shaderFile = QStringLiteral("rectsampler_bgra");
        break;
#endif
        // fallthrough
    case QVideoFrameFormat::Format_Invalid:
    default:
        break;
    }

    // No shader known for this format.
    if (shaderFile.isEmpty())
        return QString();

    shaderFile.prepend(v: u":/qt-project.org/multimedia/shaders/");

    // HDR swap chains use the "_linear" shader variants.
    if (surfaceFormat == QRhiSwapChain::HDRExtendedSrgbLinear)
        shaderFile.append(v: u"_linear");

    shaderFile.append(v: u".frag.qsb");

    Q_ASSERT_X(QFile::exists(shaderFile), Q_FUNC_INFO,
               QStringLiteral("Shader file %1 does not exist").arg(shaderFile).toLatin1());
    return shaderFile;
}
440
441// Matrices are calculated from
442// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.601-7-201103-I!!PDF-E.pdf
443// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.709-6-201506-I!!PDF-E.pdf
444// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf
445//
446// For BT2020, we also need to convert the Rec2020 RGB colorspace to sRGB see
447// shaders/colorconvert.glsl for details.
448//
449// Doing the math gives the following (Y, U & V normalized to [0..1] range):
450//
451// Y = a*R + b*G + c*B
452// R = Y + e*V
453// G = Y - c*d/b*U - a*e/b*V
454// B = Y + d*U
455
456// BT2020:
457// a = .2627, b = 0.6780, c = 0.0593
458// d = 1.8814
459// e = 1.4746
460//
461// BT709:
462// a = 0.2126, b = 0.7152, c = 0.0722
463// d = 1.8556
464// e = 1.5748
465//
466// BT601:
// a = 0.299, b = 0.587, c = 0.114
468// d = 1.42
469// e = 1.772
470//
471
472// clang-format off
// Returns the 4x4 YUV -> RGB conversion matrix for the frame's color space and
// color range (see the derivation in the comment block above). An undefined
// color space is guessed from the frame height: more than 576 lines is assumed
// HD (BT.709), otherwise SD (BT.601).
static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
{
    auto colorSpace = format.colorSpace();
    if (colorSpace == QVideoFrameFormat::ColorSpace_Undefined) {
        if (format.frameHeight() > 576)
            // HD video, assume BT709
            colorSpace = QVideoFrameFormat::ColorSpace_BT709;
        else
            // SD video, assume BT601
            colorSpace = QVideoFrameFormat::ColorSpace_BT601;
    }
    switch (colorSpace) {
    case QVideoFrameFormat::ColorSpace_AdobeRgb:
        return {
            1.0f,  0.000f,  1.402f, -0.701f,
            1.0f, -0.344f, -0.714f,  0.529f,
            1.0f,  1.772f,  0.000f, -0.886f,
            0.0f,  0.000f,  0.000f,  1.000f
        };
    default:
    case QVideoFrameFormat::ColorSpace_BT709:
        // Full range: Y, U, V use the whole [0..1] span.
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.000f,    1.5748f,   -0.790488f,
                1.f, -0.187324f, -0.468124f, 0.329010f,
                1.f,  1.855600f,  0.000f,   -0.931439f,
                0.0f, 0.000f,     0.000f,    1.0f
            };
        // Limited (video) range: note the 1.1644 luma scale.
        return {
            1.1644f,  0.0000f,  1.7927f, -0.9729f,
            1.1644f, -0.2132f, -0.5329f,  0.3015f,
            1.1644f,  2.1124f,  0.0000f, -1.1334f,
            0.0000f,  0.0000f,  0.0000f,  1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT2020:
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.0000f,  1.4746f, -0.7402f,
                1.f, -0.1646f, -0.5714f,  0.3694f,
                1.f,  1.8814f,  0.000f,  -0.9445f,
                0.0f, 0.0000f,  0.000f,   1.0000f
            };
        return {
            1.1644f,  0.000f,   1.6787f, -0.9157f,
            1.1644f, -0.1874f, -0.6504f,  0.3475f,
            1.1644f,  2.1418f,  0.0000f, -1.1483f,
            0.0000f,  0.0000f,  0.0000f,  1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT601:
        // Corresponds to the primaries used by NTSC BT601. For PAL BT601, we use the BT709 conversion
        // as those are very close.
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.000f,   1.772f,   -0.886f,
                1.f, -0.1646f, -0.57135f,  0.36795f,
                1.f,  1.42f,    0.000f,   -0.71f,
                0.0f, 0.000f,   0.000f,    1.0000f
            };
        return {
            1.164f,  0.000f,  1.596f, -0.8708f,
            1.164f, -0.392f, -0.813f,  0.5296f,
            1.164f,  2.017f,  0.000f, -1.0810f,
            0.000f,  0.000f,  0.000f,  1.0000f
        };
    }
}
539// clang-format on
540
541// PQ transfer function, see also https://en.wikipedia.org/wiki/Perceptual_quantizer
542// or https://ieeexplore.ieee.org/document/7291452
// Encodes a linear luminance value (1.0 == SDR white, i.e. 100 nits) with the
// PQ (SMPTE ST 2084) transfer function.
// Fix: removed the stray parameter-name annotation tokens ("x:", "y:") that
// made the powf() calls invalid C++; constants made constexpr.
static float convertPQFromLinear(float sig)
{
    // Constants from the ST 2084 specification.
    constexpr float m1 = 1305.f / 8192.f;
    constexpr float m2 = 2523.f / 32.f;
    constexpr float c1 = 107.f / 128.f;
    constexpr float c2 = 2413.f / 128.f;
    constexpr float c3 = 2392.f / 128.f;

    // Map SDR white onto the 10000 nit PQ scale.
    constexpr float SDR_LEVEL = 100.f;
    sig *= SDR_LEVEL / 10000.f;

    // PQ OETF: ((c1 + c2 * x^m1) / (1 + c3 * x^m1))^m2
    const float psig = powf(sig, m1);
    const float num = c1 + c2 * psig;
    const float den = 1 + c3 * psig;
    return powf(num / den, m2);
}
558
// Encodes a linear scene-light value with the HLG (Hybrid Log-Gamma,
// ITU-R BT.2100) transfer function. The two branches meet continuously at the
// 1/12 breakpoint, where both evaluate to 0.5.
// Fix: removed the stray parameter-name annotation tokens ("x:") that made the
// sqrtf()/logf() calls invalid C++; constants made constexpr.
float convertHLGFromLinear(float sig)
{
    constexpr float a = 0.17883277f;
    constexpr float b = 0.28466892f; // = 1 - 4a
    constexpr float c = 0.55991073f; // = 0.5 - a ln(4a)

    if (sig < 1.f / 12.f)
        return sqrtf(3.f * sig);
    return a * logf(12.f * sig - b) + c;
}
569
// Identity transfer: SDR content needs no PQ/HLG encoding, values pass through
// unchanged. Used as the default in updateUniformData().
static float convertSDRFromLinear(float sig)
{
    return sig;
}
574
575void updateUniformData(QByteArray *dst, QRhi *rhi, const QVideoFrameFormat &format,
576 const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity,
577 float maxNits)
578{
579#ifndef Q_OS_ANDROID
580 Q_UNUSED(frame);
581#endif
582
583 QMatrix4x4 cmat;
584 switch (format.pixelFormat()) {
585 case QVideoFrameFormat::Format_Invalid:
586 return;
587
588 case QVideoFrameFormat::Format_Jpeg:
589 case QVideoFrameFormat::Format_ARGB8888:
590 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
591 case QVideoFrameFormat::Format_XRGB8888:
592 case QVideoFrameFormat::Format_BGRA8888:
593 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
594 case QVideoFrameFormat::Format_BGRX8888:
595 case QVideoFrameFormat::Format_ABGR8888:
596 case QVideoFrameFormat::Format_XBGR8888:
597 case QVideoFrameFormat::Format_RGBA8888:
598 case QVideoFrameFormat::Format_RGBX8888:
599
600 case QVideoFrameFormat::Format_Y8:
601 case QVideoFrameFormat::Format_Y16:
602 break;
603 case QVideoFrameFormat::Format_IMC1:
604 case QVideoFrameFormat::Format_IMC2:
605 case QVideoFrameFormat::Format_IMC3:
606 case QVideoFrameFormat::Format_IMC4:
607 case QVideoFrameFormat::Format_AYUV:
608 case QVideoFrameFormat::Format_AYUV_Premultiplied:
609 case QVideoFrameFormat::Format_YUV420P:
610 case QVideoFrameFormat::Format_YUV420P10:
611 case QVideoFrameFormat::Format_YUV422P:
612 case QVideoFrameFormat::Format_YV12:
613 case QVideoFrameFormat::Format_UYVY:
614 case QVideoFrameFormat::Format_YUYV:
615 case QVideoFrameFormat::Format_NV12:
616 case QVideoFrameFormat::Format_NV21:
617 case QVideoFrameFormat::Format_P010:
618 case QVideoFrameFormat::Format_P016:
619 cmat = colorMatrix(format);
620 break;
621 case QVideoFrameFormat::Format_SamplerExternalOES:
622 // get Android specific transform for the externalsampler texture
623 if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
624 cmat = hwBuffer->externalTextureMatrix();
625 break;
626 case QVideoFrameFormat::Format_SamplerRect:
627 {
628 // Similarly to SamplerExternalOES, the "color matrix" is used here to
629 // transform the texture coordinates. OpenGL texture rectangles expect
630 // non-normalized UVs, so apply a scale to have the fragment shader see
631 // UVs in range [width,height] instead of [0,1].
632 const QSize videoSize = frame.size();
633 cmat.scale(x: videoSize.width(), y: videoSize.height());
634 }
635 break;
636 }
637
638 // HDR with a PQ or HLG transfer function uses a BT2390 based tone mapping to cut off the HDR peaks
639 // This requires that we pass the max luminance the tonemapper should clip to over to the fragment
640 // shader. To reduce computations there, it's precomputed in PQ values here.
641 auto fromLinear = convertSDRFromLinear;
642 switch (format.colorTransfer()) {
643 case QVideoFrameFormat::ColorTransfer_ST2084:
644 fromLinear = convertPQFromLinear;
645 break;
646 case QVideoFrameFormat::ColorTransfer_STD_B67:
647 fromLinear = convertHLGFromLinear;
648 break;
649 default:
650 break;
651 }
652
653 if (dst->size() < qsizetype(sizeof(UniformData)))
654 dst->resize(size: sizeof(UniformData));
655
656 auto ud = reinterpret_cast<UniformData*>(dst->data());
657 memcpy(dest: ud->transformMatrix, src: transform.constData(), n: sizeof(ud->transformMatrix));
658 memcpy(dest: ud->colorMatrix, src: cmat.constData(), n: sizeof(ud->transformMatrix));
659 ud->opacity = opacity;
660 ud->width = float(format.frameWidth());
661 ud->masteringWhite = fromLinear(float(format.maxLuminance())/100.f);
662 ud->maxLum = fromLinear(float(maxNits)/100.f);
663 const TextureDescription* desc = textureDescription(format: format.pixelFormat());
664
665 // Let's consider using the red component if Red_8 is not used,
666 // it's useful for compatibility the shaders with 16bit formats.
667
668 const bool useRedComponent =
669 !desc->hasTextureFormat(format: TextureDescription::Red_8) ||
670 isRhiTextureFormatSupported(rhi, format: QRhiTexture::R8) ||
671 rhi->isFeatureSupported(feature: QRhi::RedOrAlpha8IsRed);
672 ud->redOrAlphaIndex = useRedComponent ? 0 : 3; // r:0 g:1 b:2 a:3
673 for (int plane = 0; plane < desc->nplanes; ++plane)
674 ud->planeFormats[plane] = desc->rhiTextureFormat(plane, rhi);
675}
676
// Outcome of updateTextureWithMap(): the upload either failed, copied the
// pixel data into the resource update batch, or referenced the mapped frame
// memory directly — in which case the frame must stay mapped until the batch
// is processed (see createTexturesFromMemory()).
enum class UpdateTextureWithMapResult : uint8_t {
    Failed,
    UpdatedWithDataCopy,
    UpdatedWithDataReference
};
682
// Uploads one plane of a CPU-mapped video frame into `tex` via the resource
// update batch, (re)creating the texture when it is missing or its size/format
// no longer matches. JPEG frames are decoded to an ARGB image first (copied);
// all other formats reference the mapped frame memory directly, so the caller
// must keep the frame mapped until the batch has been processed (indicated by
// the UpdatedWithDataReference return value).
static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi &rhi,
                                                       QRhiResourceUpdateBatch &rub, int plane,
                                                       std::unique_ptr<QRhiTexture> &tex)
{
    Q_ASSERT(frame.isMapped());

    QVideoFrameFormat fmt = frame.surfaceFormat();
    QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
    QSize size = fmt.frameSize();

    const TextureDescription &texDesc = descriptions[pixelFormat];
    QSize planeSize = texDesc.rhiPlaneSize(frameSize: size, plane, rhi: &rhi);

    // A fresh texture object also needs create() below, hence !tex implies needsRebuild.
    bool needsRebuild = !tex || tex->pixelSize() != planeSize || tex->format() != texDesc.rhiTextureFormat(plane, rhi: &rhi);
    if (!tex) {
        tex.reset(p: rhi.newTexture(format: texDesc.rhiTextureFormat(plane, rhi: &rhi), pixelSize: planeSize, sampleCount: 1, flags: {}));
        if (!tex) {
            qWarning(msg: "Failed to create new texture (size %dx%d)", planeSize.width(), planeSize.height());
            return UpdateTextureWithMapResult::Failed;
        }
    }

    if (needsRebuild) {
        // Re-apply size/format and (re)build the native texture resource.
        tex->setFormat(texDesc.rhiTextureFormat(plane, rhi: &rhi));
        tex->setPixelSize(planeSize);
        if (!tex->create()) {
            qWarning(msg: "Failed to create texture (size %dx%d)", planeSize.width(), planeSize.height());
            return UpdateTextureWithMapResult::Failed;
        }
    }

    auto result = UpdateTextureWithMapResult::UpdatedWithDataCopy;

    QRhiTextureSubresourceUploadDescription subresDesc;

    if (pixelFormat == QVideoFrameFormat::Format_Jpeg) {
        Q_ASSERT(plane == 0);

        QImage image;

        // calling QVideoFrame::toImage is not accurate. To be fixed.
        // frame transformation will be considered later
        const QVideoFrameFormat surfaceFormat = frame.surfaceFormat();

        const bool hasSurfaceTransform = surfaceFormat.isMirrored()
                || surfaceFormat.scanLineDirection() == QVideoFrameFormat::BottomToTop
                || surfaceFormat.rotation() != QtVideo::Rotation::None;

        if (hasSurfaceTransform)
            image = qImageFromVideoFrame(frame, transformation: VideoTransformation{});
        else
            image = frame.toImage(); // use the frame cache, no surface transforms applied

        image.convertTo(f: QImage::Format_ARGB32);
        subresDesc.setImage(image);

    } else {
        // Note, QByteArray::fromRawData creates QByteArray as a view without data copying
        subresDesc.setData(QByteArray::fromRawData(
                data: reinterpret_cast<const char *>(frame.bits(plane)), size: frame.mappedBytes(plane)));
        subresDesc.setDataStride(frame.bytesPerLine(plane));
        result = UpdateTextureWithMapResult::UpdatedWithDataReference;
    }

    QRhiTextureUploadEntry entry(0, 0, subresDesc);
    QRhiTextureUploadDescription desc({ entry });
    rub.uploadTexture(tex: tex.get(), desc);

    return result;
}
753
754static std::unique_ptr<QRhiTexture>
755createTextureFromHandle(QVideoFrameTexturesHandles &texturesSet, QRhi &rhi,
756 QVideoFrameFormat::PixelFormat pixelFormat, QSize size, int plane)
757{
758 const TextureDescription &texDesc = descriptions[pixelFormat];
759 QSize planeSize = texDesc.rhiPlaneSize(frameSize: size, plane, rhi: &rhi);
760
761 QRhiTexture::Flags textureFlags = {};
762 if (pixelFormat == QVideoFrameFormat::Format_SamplerExternalOES) {
763#ifdef Q_OS_ANDROID
764 if (rhi.backend() == QRhi::OpenGLES2)
765 textureFlags |= QRhiTexture::ExternalOES;
766#endif
767 }
768 if (pixelFormat == QVideoFrameFormat::Format_SamplerRect) {
769#ifdef Q_OS_MACOS
770 if (rhi.backend() == QRhi::OpenGLES2)
771 textureFlags |= QRhiTexture::TextureRectangleGL;
772#endif
773 }
774
775 if (quint64 handle = texturesSet.textureHandle(rhi, plane); handle) {
776 std::unique_ptr<QRhiTexture> tex(rhi.newTexture(format: texDesc.rhiTextureFormat(plane, rhi: &rhi), pixelSize: planeSize, sampleCount: 1, flags: textureFlags));
777 if (tex->createFrom(src: {.object: handle, .layout: 0}))
778 return tex;
779
780 qWarning(msg: "Failed to initialize QRhiTexture wrapper for native texture object %llu",handle);
781 }
782 return {};
783}
784
785template <typename TexturesType, typename... Args>
786static QVideoFrameTexturesUPtr
787createTexturesArray(QRhi &rhi, QVideoFrameTexturesHandles &texturesSet,
788 QVideoFrameFormat::PixelFormat pixelFormat, QSize size, Args &&...args)
789{
790 const TextureDescription &texDesc = descriptions[pixelFormat];
791 bool ok = true;
792 RhiTextureArray textures;
793 for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
794 textures[plane] = QVideoTextureHelper::createTextureFromHandle(texturesSet, rhi,
795 pixelFormat, size, plane);
796 ok &= bool(textures[plane]);
797 }
798 if (ok)
799 return std::make_unique<TexturesType>(std::move(textures), std::forward<Args>(args)...);
800 else
801 return {};
802}
803
804QVideoFrameTexturesUPtr createTexturesFromHandles(QVideoFrameTexturesHandlesUPtr texturesSet,
805 QRhi &rhi,
806 QVideoFrameFormat::PixelFormat pixelFormat,
807 QSize size)
808{
809 if (!texturesSet)
810 return nullptr;
811
812 if (pixelFormat == QVideoFrameFormat::Format_Invalid)
813 return nullptr;
814
815 if (size.isEmpty())
816 return nullptr;
817
818 auto &texturesSetRef = *texturesSet;
819 return createTexturesArray<QVideoFrameTexturesFromHandlesSet>(rhi, texturesSet&: texturesSetRef, pixelFormat,
820 size, args: std::move(texturesSet));
821}
822
// Uploads the frame's CPU-mapped plane data into RHI textures, reusing the
// texture objects from oldTextures when they were also created by this path.
// The frame is taken by value so it can be stored in the result when the RHI
// upload references the mapped memory directly.
static QVideoFrameTexturesUPtr createTexturesFromMemory(QVideoFrame frame, QRhi &rhi,
                                                        QRhiResourceUpdateBatch &rub,
                                                        QVideoFrameTexturesUPtr &oldTextures)
{
    if (!frame.map(QVideoFrame::ReadOnly)) {
        qWarning() << "Cannot map a video frame in ReadOnly mode!";
        return {};
    }

    // Ensure the frame is unmapped on every exit path, including the failure
    // returns below.
    auto unmapFrameGuard = qScopeGuard([&frame] { frame.unmap(); });

    const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];

    // Reuse the old texture objects only if they were produced by this
    // memory-upload path; dynamic_cast verifies the concrete type before we
    // take ownership via release() below.
    const bool canReuseTextures(dynamic_cast<QVideoFrameTexturesFromMemory*>(oldTextures.get()));

    std::unique_ptr<QVideoFrameTexturesFromMemory> textures(canReuseTextures ?
            static_cast<QVideoFrameTexturesFromMemory *>(oldTextures.release()) :
            new QVideoFrameTexturesFromMemory);

    RhiTextureArray& textureArray = textures->textureArray();
    bool shouldKeepMapping = false;
    for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
        const auto result = updateTextureWithMap(frame, rhi, rub, plane, textureArray[plane]);
        if (result == UpdateTextureWithMapResult::Failed)
            return {};

        // If the upload references the mapped bytes directly (no copy was
        // made), the frame must stay mapped for as long as the textures live.
        if (result == UpdateTextureWithMapResult::UpdatedWithDataReference)
            shouldKeepMapping = true;
    }

    // as QVideoFrame::unmap does nothing with null frames, we just move the frame to the result
    textures->setMappedFrame(shouldKeepMapping ? std::move(frame) : QVideoFrame());

    return textures;
}
858
859QVideoFrameTexturesUPtr createTextures(const QVideoFrame &frame, QRhi &rhi,
860 QRhiResourceUpdateBatch &rub,
861 QVideoFrameTexturesUPtr &oldTextures)
862{
863 if (!frame.isValid())
864 return {};
865
866 auto setSourceFrame = [&frame](QVideoFrameTexturesUPtr result) {
867 result->setSourceFrame(frame);
868 return result;
869 };
870
871 if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
872 if (auto textures = hwBuffer->mapTextures(rhi, oldTextures))
873 return setSourceFrame(std::move(textures));
874
875 QVideoFrameFormat format = frame.surfaceFormat();
876 if (auto textures = createTexturesArray<QVideoFrameTexturesFromRhiTextureArray>(
877 rhi, texturesSet&: *hwBuffer, pixelFormat: format.pixelFormat(), size: format.frameSize()))
878 return setSourceFrame(std::move(textures));
879 }
880
881 if (auto textures = createTexturesFromMemory(frame, rhi, rub, oldTextures))
882 return setSourceFrame(std::move(textures));
883
884 return {};
885}
886
887bool SubtitleLayout::update(const QSize &frameSize, QString text)
888{
889 text.replace(before: QLatin1Char('\n'), after: QChar::LineSeparator);
890 if (layout.text() == text && videoSize == frameSize)
891 return false;
892
893 videoSize = frameSize;
894 QFont font;
895 // 0.045 - based on this https://www.md-subs.com/saa-subtitle-font-size
896 qreal fontSize = frameSize.height() * 0.045;
897 font.setPointSize(fontSize);
898
899 layout.setText(text);
900 if (text.isEmpty()) {
901 bounds = {};
902 return true;
903 }
904 layout.setFont(font);
905 QTextOption option;
906 option.setUseDesignMetrics(true);
907 option.setAlignment(Qt::AlignCenter);
908 layout.setTextOption(option);
909
910 QFontMetrics metrics(font);
911 int leading = metrics.leading();
912
913 qreal lineWidth = videoSize.width()*.9;
914 qreal margin = videoSize.width()*.05;
915 qreal height = 0;
916 qreal textWidth = 0;
917 layout.beginLayout();
918 while (1) {
919 QTextLine line = layout.createLine();
920 if (!line.isValid())
921 break;
922
923 line.setLineWidth(lineWidth);
924 height += leading;
925 line.setPosition(QPointF(margin, height));
926 height += line.height();
927 textWidth = qMax(a: textWidth, b: line.naturalTextWidth());
928 }
929 layout.endLayout();
930
931 // put subtitles vertically in lower part of the video but not stuck to the bottom
932 int bottomMargin = videoSize.height() / 20;
933 qreal y = videoSize.height() - bottomMargin - height;
934 layout.setPosition(QPointF(0, y));
935 textWidth += fontSize/4.;
936
937 bounds = QRectF((videoSize.width() - textWidth)/2., y, textWidth, height);
938 return true;
939}
940
941void SubtitleLayout::draw(QPainter *painter, const QPointF &translate) const
942{
943 painter->save();
944 painter->translate(offset: translate);
945 painter->setCompositionMode(QPainter::CompositionMode_SourceOver);
946
947 QColor bgColor = Qt::black;
948 bgColor.setAlpha(128);
949 painter->setBrush(bgColor);
950 painter->setPen(Qt::NoPen);
951 painter->drawRect(rect: bounds);
952
953 QTextLayout::FormatRange range;
954 range.start = 0;
955 range.length = layout.text().size();
956 range.format.setForeground(Qt::white);
957 layout.draw(p: painter, pos: {}, selections: { range });
958 painter->restore();
959}
960
961QImage SubtitleLayout::toImage() const
962{
963 auto size = bounds.size().toSize();
964 if (size.isEmpty())
965 return QImage();
966 QImage img(size, QImage::Format_RGBA8888_Premultiplied);
967 QColor bgColor = Qt::black;
968 bgColor.setAlpha(128);
969 img.fill(color: bgColor);
970
971 QPainter painter(&img);
972 painter.translate(offset: -bounds.topLeft());
973 QTextLayout::FormatRange range;
974 range.start = 0;
975 range.length = layout.text().size();
976 range.format.setForeground(Qt::white);
977 layout.draw(p: &painter, pos: {}, selections: { range });
978 return img;
979}
980
981}
982
983QT_END_NAMESPACE
984

source code of qtmultimedia/src/multimedia/video/qvideotexturehelper.cpp