1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "qvideotexturehelper_p.h"
5#include "qvideoframe.h"
6#include "qabstractvideobuffer_p.h"
7
8#include <qpainter.h>
9#include <qloggingcategory.h>
10
11QT_BEGIN_NAMESPACE
12
13namespace QVideoTextureHelper
14{
15
// One entry per QVideoFrameFormat::PixelFormat, in enum order: the table is
// indexed directly by the pixel format's numeric value (see
// textureDescription()), so entries below must not be reordered and the array
// must cover all NPixelFormats values.
//
// Fields per entry:
//  - nplanes:       number of separate texture planes the format uses
//  - strideFactor:  multiplier relating plane width to bytes per line of the
//                   first plane (0 for sampler formats never uploaded from
//                   CPU memory)
//  - bytesRequired: total frame buffer size in bytes, computed from the first
//                   plane's stride and the frame height
//  - textureFormat: QRhiTexture format used for each plane
//  - sizeScale:     per-plane divisor applied to the frame size, e.g.
//                   { 2, 2 } for 4:2:0 chroma subsampled planes
static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] = {
    //  Format_Invalid
    { .nplanes: 0, .strideFactor: 0,
      .bytesRequired: [](int, int) { return 0; },
      .textureFormat: { QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat},
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_ARGB8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_ARGB8888_Premultiplied
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_XRGB8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_BGRA8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::BGRA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_BGRA8888_Premultiplied
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::BGRA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_BGRX8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::BGRA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_ABGR8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_XBGR8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_RGBA8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_RGBX8888
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_AYUV
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_AYUV_Premultiplied
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
     // Format_YUV420P
     // The (height * 3 / 2 + 1) & ~1 below rounds the 1.5x plane height
     // up to an even number of lines.
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
     // Format_YUV422P
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 1 }, { .x: 2, .y: 1 } }
    },
     // Format_YV12
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
    // Format_UYVY
    { .nplanes: 1, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 2, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_YUYV
    { .nplanes: 1, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 2, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_NV12
    { .nplanes: 2, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::RG8, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_NV21
    { .nplanes: 2, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::RG8, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_IMC1
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) {
          // IMC1 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
    // Format_IMC2
    { .nplanes: 2, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return 2*stride*height; },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_IMC3
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) {
          // IMC3 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
    // Format_IMC4
    { .nplanes: 2, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return 2*stride*height; },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_Y8
    { .nplanes: 1, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::R8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_Y16
    { .nplanes: 1, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::R16, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_P010
    { .nplanes: 2, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { QRhiTexture::R16, QRhiTexture::RG16, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_P016
    { .nplanes: 2, .strideFactor: 2,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { QRhiTexture::R16, QRhiTexture::RG16, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 1, .y: 1 } }
    },
    // Format_SamplerExternalOES
    // External sampler: the texture comes from the platform, never uploaded.
    {
        .nplanes: 1, .strideFactor: 0,
        .bytesRequired: [](int, int) { return 0; },
        .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
        .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_Jpeg
    // JPEG frames are decoded to an RGBA image before upload.
    { .nplanes: 1, .strideFactor: 4,
      .bytesRequired: [](int stride, int height) { return stride*height; },
      .textureFormat: { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_SamplerRect
    // Rectangle sampler: the texture comes from the platform, never uploaded.
    {
        .nplanes: 1, .strideFactor: 0,
        .bytesRequired: [](int, int) { return 0; },
        .textureFormat: { QRhiTexture::BGRA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
        .sizeScale: { { .x: 1, .y: 1 }, { .x: 1, .y: 1 }, { .x: 1, .y: 1 } }
    },
    // Format_YUV420P10
    { .nplanes: 3, .strideFactor: 1,
      .bytesRequired: [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      .textureFormat: { QRhiTexture::R16, QRhiTexture::R16, QRhiTexture::R16 },
      .sizeScale: { { .x: 1, .y: 1 }, { .x: 2, .y: 2 }, { .x: 2, .y: 2 } }
    },
};
222
223const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format)
224{
225 return descriptions + format;
226}
227
228QString vertexShaderFileName(const QVideoFrameFormat &format)
229{
230 auto fmt = format.pixelFormat();
231 Q_UNUSED(fmt);
232
233#if 1//def Q_OS_ANDROID
234 if (fmt == QVideoFrameFormat::Format_SamplerExternalOES)
235 return QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb");
236#endif
237#if 1//def Q_OS_MACOS
238 if (fmt == QVideoFrameFormat::Format_SamplerRect)
239 return QStringLiteral(":/qt-project.org/multimedia/shaders/rectsampler.vert.qsb");
240#endif
241
242 return QStringLiteral(":/qt-project.org/multimedia/shaders/vertex.vert.qsb");
243}
244
// Maps the frame's pixel format (and, for HDR content, the swapchain surface
// format) to the fragment shader resource that samples and color-converts it.
// Returns an empty QString when the format cannot be rendered.
QString fragmentShaderFileName(const QVideoFrameFormat &format, QRhiSwapChain::Format surfaceFormat)
{
    const char *shader = nullptr;
    switch (format.pixelFormat()) {
    case QVideoFrameFormat::Format_Y8:
    case QVideoFrameFormat::Format_Y16:
        shader = "y";
        break;
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        shader = "ayuv";
        break;
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        shader = "argb";
        break;
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
        shader = "abgr";
        break;
    case QVideoFrameFormat::Format_Jpeg: // Jpeg is decoded transparently into an ARGB texture
        shader = "bgra";
        break;
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_RGBX8888:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        shader = "rgba";
        break;
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_IMC3:
        shader = "yuv_triplanar";
        break;
    case QVideoFrameFormat::Format_YUV420P10:
        shader = "yuv_triplanar_p10";
        break;
    // YV12/IMC1 are the same planar layouts with U and V swapped.
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
        shader = "yvu_triplanar";
        break;
    case QVideoFrameFormat::Format_IMC2:
        shader = "imc2";
        break;
    case QVideoFrameFormat::Format_IMC4:
        shader = "imc4";
        break;
    case QVideoFrameFormat::Format_UYVY:
        shader = "uyvy";
        break;
    case QVideoFrameFormat::Format_YUYV:
        shader = "yuyv";
        break;
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
        // P010/P016 have the same layout as NV12, just 16 instead of 8 bits per pixel
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_ST2084) {
            shader = "nv12_bt2020_pq";
            break;
        }
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_STD_B67) {
            shader = "nv12_bt2020_hlg";
            break;
        }
        // Fall through, should be bt709
        Q_FALLTHROUGH();
    case QVideoFrameFormat::Format_NV12:
        shader = "nv12";
        break;
    case QVideoFrameFormat::Format_NV21:
        shader = "nv21";
        break;
    // NOTE(review): the `#if 1//def` guards below are enabled on all
    // platforms; if they revert to real platform checks, SamplerExternalOES
    // would fall through into SamplerRect — confirm that is intended.
    case QVideoFrameFormat::Format_SamplerExternalOES:
#if 1//def Q_OS_ANDROID
        shader = "externalsampler";
        break;
#endif
    case QVideoFrameFormat::Format_SamplerRect:
#if 1//def Q_OS_MACOS
        shader = "rectsampler_bgra";
        break;
#endif
        // fallthrough
    case QVideoFrameFormat::Format_Invalid:
    default:
        break;
    }
    if (!shader)
        return QString();
    QString shaderFile = QStringLiteral(":/qt-project.org/multimedia/shaders/") + QString::fromLatin1(ba: shader);
    // HDR linear swapchains get a "_linear" shader variant that skips the
    // non-linear encoding step.
    if (surfaceFormat == QRhiSwapChain::HDRExtendedSrgbLinear)
        shaderFile += QLatin1String("_linear");
    shaderFile += QStringLiteral(".frag.qsb");
    return shaderFile;
}
342
343// Matrices are calculated from
344// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.601-7-201103-I!!PDF-E.pdf
345// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.709-6-201506-I!!PDF-E.pdf
346// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf
347//
348// For BT2020, we also need to convert the Rec2020 RGB colorspace to sRGB see
349// shaders/colorconvert.glsl for details.
350//
351// Doing the math gives the following (Y, U & V normalized to [0..1] range):
352//
353// Y = a*R + b*G + c*B
354// R = Y + e*V
355// G = Y - c*d/b*U - a*e/b*V
356// B = Y + d*U
357
358// BT2020:
359// a = .2627, b = 0.6780, c = 0.0593
360// d = 1.8814
361// e = 1.4746
362//
363// BT709:
364// a = 0.2126, b = 0.7152, c = 0.0722
365// d = 1.8556
366// e = 1.5748
367//
// BT601:
// a = 0.299, b = 0.587, c = 0.114
// d = 1.772
// e = 1.402
372//
373static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
374{
375 auto colorSpace = format.colorSpace();
376 if (colorSpace == QVideoFrameFormat::ColorSpace_Undefined) {
377 if (format.frameHeight() > 576)
378 // HD video, assume BT709
379 colorSpace = QVideoFrameFormat::ColorSpace_BT709;
380 else
381 // SD video, assume BT601
382 colorSpace = QVideoFrameFormat::ColorSpace_BT601;
383 }
384 switch (colorSpace) {
385 case QVideoFrameFormat::ColorSpace_AdobeRgb:
386 return QMatrix4x4(
387 1.0f, 0.000f, 1.402f, -0.701f,
388 1.0f, -0.344f, -0.714f, 0.529f,
389 1.0f, 1.772f, 0.000f, -0.886f,
390 0.0f, 0.000f, 0.000f, 1.0000f);
391 default:
392 case QVideoFrameFormat::ColorSpace_BT709:
393 if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
394 return QMatrix4x4(
395 1.f, 0.000f, 1.5748f, -0.8774f,
396 1.f, -0.187324f, -0.468124f, 0.327724f,
397 1.f, 1.8556f, 0.000f, -0.9278f,
398 0.0f, 0.000f, 0.000f, 1.0000f);
399 return QMatrix4x4(
400 1.1644f, 0.000f, 1.7928f, -0.9731f,
401 1.1644f, -0.2132f, -0.5329f, 0.3015f,
402 1.1644f, 2.1124f, 0.000f, -1.1335f,
403 0.0f, 0.000f, 0.000f, 1.0000f);
404 case QVideoFrameFormat::ColorSpace_BT2020:
405 if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
406 return QMatrix4x4(
407 1.f, 0.000f, 1.4746f, -0.7373f,
408 1.f, -0.2801f, -0.91666f, 0.5984f,
409 1.f, 1.8814f, 0.000f, -0.9407f,
410 0.0f, 0.000f, 0.000f, 1.0000f);
411 return QMatrix4x4(
412 1.1644f, 0.000f, 1.6787f, -0.9158f,
413 1.1644f, -0.1874f, -0.6511f, 0.3478f,
414 1.1644f, 2.1418f, 0.000f, -1.1483f,
415 0.0f, 0.000f, 0.000f, 1.0000f);
416 case QVideoFrameFormat::ColorSpace_BT601:
417 // Corresponds to the primaries used by NTSC BT601. For PAL BT601, we use the BT709 conversion
418 // as those are very close.
419 if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
420 return QMatrix4x4(
421 1.f, 0.000f, 1.772f, -0.886f,
422 1.f, -0.1646f, -0.57135f, 0.36795f,
423 1.f, 1.42f, 0.000f, -0.71f,
424 0.0f, 0.000f, 0.000f, 1.0000f);
425 return QMatrix4x4(
426 1.164f, 0.000f, 1.596f, -0.8708f,
427 1.164f, -0.392f, -0.813f, 0.5296f,
428 1.164f, 2.017f, 0.000f, -1.081f,
429 0.0f, 0.000f, 0.000f, 1.0000f);
430 }
431}
432
#if 0
// NOTE: compiled out — kept for reference until brightness/contrast/hue/
// saturation adjustments are wired into the YUV rendering pipeline.
static QMatrix4x4 yuvColorCorrectionMatrix(float brightness, float contrast, float hue, float saturation)
{
    // Color correction in YUV space is done as follows:

    // The formulas assumes values in range 0-255, and a blackpoint of Y=16, whitepoint of Y=235
    //
    // Bightness: b
    // Contrast: c
    // Hue: h
    // Saturation: s
    //
    // Y' = (Y - 16)*c + b + 16
    // U' = ((U - 128)*cos(h) + (V - 128)*sin(h))*c*s + 128
    // V' = ((V - 128)*cos(h) - (U - 128)*sin(h))*c*s + 128
    //
    // For normalized YUV values (0-1 range) as we have them in the pixel shader, this translates to:
    //
    // Y' = (Y - .0625)*c + b + .0625
    // U' = ((U - .5)*cos(h) + (V - .5)*sin(h))*c*s + .5
    // V' = ((V - .5)*cos(h) - (U - .5)*sin(h))*c*s + .5
    //
    // The values need to be clamped to 0-1 after the correction and before converting to RGB
    // The transformation can be encoded in a 4x4 matrix assuming we have an A component of 1

    float chcs = cos(hue)*contrast*saturation;
    float shcs = sin(hue)*contrast*saturation;
    return QMatrix4x4(contrast, 0, 0, .0625*(1 - contrast) + brightness,
                      0, chcs, shcs, .5*(1 - chcs - shcs),
                      0, -shcs, chcs, .5*(1 + shcs - chcs),
                      0, 0, 0, 1);
}
#endif
466
// PQ transfer function, see also https://en.wikipedia.org/wiki/Perceptual_quantizer
// or https://ieeexplore.ieee.org/document/7291452
//
// Maps a linear luminance value (1.0 == SDR white, i.e. 100 nits) to the
// corresponding non-linear PQ signal value.
static float convertPQFromLinear(float sig)
{
    // SMPTE ST 2084 constants.
    const float m1 = 1305.f/8192.f;
    const float m2 = 2523.f/32.f;
    const float c1 = 107.f/128.f;
    const float c2 = 2413.f/128.f;
    const float c3 = 2392.f/128.f;

    // PQ covers 0..10000 nits; rescale so an input of 1.0 maps to 100 nits.
    const float SDR_LEVEL = 100.f;
    const float scaled = sig * (SDR_LEVEL/10000.f);
    const float p = powf(scaled, m1);
    return powf((c1 + c2*p) / (1 + c3*p), m2);
}
484
// HLG (hybrid log-gamma) OETF as defined in Rec. ITU-R BT.2100: maps a
// linear scene value to the non-linear HLG signal value.
float convertHLGFromLinear(float sig)
{
    const float a = 0.17883277f;
    const float b = 0.28466892f; // = 1 - 4a
    const float c = 0.55991073f; // = 0.5 - a ln(4a)

    // Square-root segment below 1/12, logarithmic segment above.
    const bool sqrtSegment = sig < 1.f/12.f;
    return sqrtSegment ? sqrtf(3.f*sig)
                       : a*logf(12.f*sig - b) + c;
}
495
// Identity "transfer function" for SDR content: no non-linear encoding is
// applied, so the value passes through unchanged.
static float convertSDRFromLinear(float sig)
{
    return sig;
}
500
501void updateUniformData(QByteArray *dst, const QVideoFrameFormat &format, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity, float maxNits)
502{
503#ifndef Q_OS_ANDROID
504 Q_UNUSED(frame);
505#endif
506
507 QMatrix4x4 cmat;
508 switch (format.pixelFormat()) {
509 case QVideoFrameFormat::Format_Invalid:
510 return;
511
512 case QVideoFrameFormat::Format_Jpeg:
513 case QVideoFrameFormat::Format_ARGB8888:
514 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
515 case QVideoFrameFormat::Format_XRGB8888:
516 case QVideoFrameFormat::Format_BGRA8888:
517 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
518 case QVideoFrameFormat::Format_BGRX8888:
519 case QVideoFrameFormat::Format_ABGR8888:
520 case QVideoFrameFormat::Format_XBGR8888:
521 case QVideoFrameFormat::Format_RGBA8888:
522 case QVideoFrameFormat::Format_RGBX8888:
523
524 case QVideoFrameFormat::Format_Y8:
525 case QVideoFrameFormat::Format_Y16:
526 break;
527 case QVideoFrameFormat::Format_IMC1:
528 case QVideoFrameFormat::Format_IMC2:
529 case QVideoFrameFormat::Format_IMC3:
530 case QVideoFrameFormat::Format_IMC4:
531 case QVideoFrameFormat::Format_AYUV:
532 case QVideoFrameFormat::Format_AYUV_Premultiplied:
533 case QVideoFrameFormat::Format_YUV420P:
534 case QVideoFrameFormat::Format_YUV420P10:
535 case QVideoFrameFormat::Format_YUV422P:
536 case QVideoFrameFormat::Format_YV12:
537 case QVideoFrameFormat::Format_UYVY:
538 case QVideoFrameFormat::Format_YUYV:
539 case QVideoFrameFormat::Format_NV12:
540 case QVideoFrameFormat::Format_NV21:
541 case QVideoFrameFormat::Format_P010:
542 case QVideoFrameFormat::Format_P016:
543 cmat = colorMatrix(format);
544 break;
545 case QVideoFrameFormat::Format_SamplerExternalOES:
546 // get Android specific transform for the externalsampler texture
547 cmat = frame.videoBuffer()->externalTextureMatrix();
548 break;
549 case QVideoFrameFormat::Format_SamplerRect:
550 {
551 // Similarly to SamplerExternalOES, the "color matrix" is used here to
552 // transform the texture coordinates. OpenGL texture rectangles expect
553 // non-normalized UVs, so apply a scale to have the fragment shader see
554 // UVs in range [width,height] instead of [0,1].
555 const QSize videoSize = frame.size();
556 cmat.scale(x: videoSize.width(), y: videoSize.height());
557 }
558 break;
559 }
560
561 // HDR with a PQ or HLG transfer function uses a BT2390 based tone mapping to cut off the HDR peaks
562 // This requires that we pass the max luminance the tonemapper should clip to over to the fragment
563 // shader. To reduce computations there, it's precomputed in PQ values here.
564 auto fromLinear = convertSDRFromLinear;
565 switch (format.colorTransfer()) {
566 case QVideoFrameFormat::ColorTransfer_ST2084:
567 fromLinear = convertPQFromLinear;
568 break;
569 case QVideoFrameFormat::ColorTransfer_STD_B67:
570 fromLinear = convertHLGFromLinear;
571 break;
572 default:
573 break;
574 }
575
576 if (dst->size() < qsizetype(sizeof(UniformData)))
577 dst->resize(size: sizeof(UniformData));
578
579 auto ud = reinterpret_cast<UniformData*>(dst->data());
580 memcpy(dest: ud->transformMatrix, src: transform.constData(), n: sizeof(ud->transformMatrix));
581 memcpy(dest: ud->colorMatrix, src: cmat.constData(), n: sizeof(ud->transformMatrix));
582 ud->opacity = opacity;
583 ud->width = float(format.frameWidth());
584 ud->masteringWhite = fromLinear(float(format.maxLuminance())/100.f);
585 ud->maxLum = fromLinear(float(maxNits)/100.f);
586}
587
// Uploads plane \a plane of \a frame into \a tex through the resource update
// batch \a rub, (re)creating the texture when its size or format no longer
// matches the frame. Returns false if the frame can't be mapped or the
// texture can't be created.
static bool updateTextureWithMap(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, int plane, std::unique_ptr<QRhiTexture> &tex)
{
    if (!frame.map(mode: QVideoFrame::ReadOnly)) {
        qWarning() << "could not map data of QVideoFrame for upload";
        return false;
    }

    QVideoFrameFormat fmt = frame.surfaceFormat();
    QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
    QSize size = fmt.frameSize();

    // Plane size is the frame size divided by the per-plane subsampling factors.
    const TextureDescription &texDesc = descriptions[pixelFormat];
    QSize planeSize(size.width()/texDesc.sizeScale[plane].x, size.height()/texDesc.sizeScale[plane].y);

    bool needsRebuild = !tex || tex->pixelSize() != planeSize || tex->format() != texDesc.textureFormat[plane];
    if (!tex) {
        tex.reset(p: rhi->newTexture(format: texDesc.textureFormat[plane], pixelSize: planeSize, sampleCount: 1, flags: {}));
        if (!tex) {
            qWarning(msg: "Failed to create new texture (size %dx%d)", planeSize.width(), planeSize.height());
            return false;
        }
    }

    if (needsRebuild) {
        tex->setFormat(texDesc.textureFormat[plane]);
        tex->setPixelSize(planeSize);
        if (!tex->create()) {
            qWarning(msg: "Failed to create texture (size %dx%d)", planeSize.width(), planeSize.height());
            return false;
        }
    }

    QRhiTextureSubresourceUploadDescription subresDesc;
    QImage image;
    if (pixelFormat == QVideoFrameFormat::Format_Jpeg) {
        // JPEG frames are decoded to an ARGB image first; the QByteArray
        // constructor used here makes a deep copy of the image data.
        image = frame.toImage();
        image.convertTo(f: QImage::Format_ARGB32);
        subresDesc.setData(QByteArray((const char *)image.bits(), image.bytesPerLine()*image.height()));
        subresDesc.setDataStride(image.bytesPerLine());
    } else {
        // NOTE(review): fromRawData references the mapped frame memory
        // without copying, and the frame is never unmapped here — this relies
        // on the mapped data staying valid until the update batch is
        // submitted; confirm against QRhi's upload lifetime guarantees.
        subresDesc.setData(QByteArray::fromRawData(data: (const char *)frame.bits(plane), size: frame.mappedBytes(plane)));
        subresDesc.setDataStride(frame.bytesPerLine(plane));
    }

    QRhiTextureUploadEntry entry(0, 0, subresDesc);
    QRhiTextureUploadDescription desc({ entry });
    rub->uploadTexture(tex: tex.get(), desc);

    return true;
}
638
639static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &frame, QRhi *rhi, int plane)
640{
641 QVideoFrameFormat fmt = frame.surfaceFormat();
642 QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
643 QSize size = fmt.frameSize();
644
645 const TextureDescription &texDesc = descriptions[pixelFormat];
646 QSize planeSize(size.width()/texDesc.sizeScale[plane].x, size.height()/texDesc.sizeScale[plane].y);
647
648 QRhiTexture::Flags textureFlags = {};
649 if (pixelFormat == QVideoFrameFormat::Format_SamplerExternalOES) {
650#ifdef Q_OS_ANDROID
651 if (rhi->backend() == QRhi::OpenGLES2)
652 textureFlags |= QRhiTexture::ExternalOES;
653#endif
654 }
655 if (pixelFormat == QVideoFrameFormat::Format_SamplerRect) {
656#ifdef Q_OS_MACOS
657 if (rhi->backend() == QRhi::OpenGLES2)
658 textureFlags |= QRhiTexture::TextureRectangleGL;
659#endif
660 }
661
662 if (quint64 handle = frame.videoBuffer()->textureHandle(plane); handle) {
663 std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format: texDesc.textureFormat[plane], pixelSize: planeSize, sampleCount: 1, flags: textureFlags));
664 if (tex->createFrom(src: {.object: handle, .layout: 0}))
665 return tex;
666
667 qWarning(msg: "Failed to initialize QRhiTexture wrapper for native texture object %llu",handle);
668 }
669 return {};
670}
671
672class QVideoFrameTexturesArray : public QVideoFrameTextures
673{
674public:
675 using TextureArray = std::array<std::unique_ptr<QRhiTexture>, TextureDescription::maxPlanes>;
676 QVideoFrameTexturesArray(TextureArray &&textures)
677 : m_textures(std::move(textures))
678 {}
679
680 QRhiTexture *texture(uint plane) const override
681 {
682 return plane < std::size(cont: m_textures) ? m_textures[plane].get() : nullptr;
683 }
684
685 TextureArray takeTextures() { return std::move(m_textures); }
686
687private:
688 TextureArray m_textures;
689};
690
691static std::unique_ptr<QVideoFrameTextures> createTexturesFromHandles(const QVideoFrame &frame, QRhi *rhi)
692{
693 const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];
694 bool ok = true;
695 QVideoFrameTexturesArray::TextureArray textures;
696 for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
697 textures[plane] = QVideoTextureHelper::createTextureFromHandle(frame, rhi, plane);
698 ok &= bool(textures[plane]);
699 }
700 if (ok)
701 return std::make_unique<QVideoFrameTexturesArray>(args: std::move(textures));
702 else
703 return {};
704}
705
706std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(const QVideoFrame &frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, QVideoFrameTextures *old)
707{
708 const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];
709 QVideoFrameTexturesArray::TextureArray textures;
710 auto oldArray = dynamic_cast<QVideoFrameTexturesArray *>(old);
711 if (oldArray)
712 textures = oldArray->takeTextures();
713
714 bool ok = true;
715 for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
716 ok &= updateTextureWithMap(frame, rhi, rub, plane, tex&: textures[plane]);
717 }
718 if (ok)
719 return std::make_unique<QVideoFrameTexturesArray>(args: std::move(textures));
720 else
721 return {};
722}
723
724std::unique_ptr<QVideoFrameTextures> createTextures(QVideoFrame &frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, std::unique_ptr<QVideoFrameTextures> &&oldTextures)
725{
726 QAbstractVideoBuffer *vf = frame.videoBuffer();
727 if (!vf)
728 return {};
729
730 if (auto vft = vf->mapTextures(rhi))
731 return vft;
732
733 if (auto vft = createTexturesFromHandles(frame, rhi))
734 return vft;
735
736 return createTexturesFromMemory(frame, rhi, rub, old: oldTextures.get());
737}
738
739bool SubtitleLayout::update(const QSize &frameSize, QString text)
740{
741 text.replace(before: QLatin1Char('\n'), after: QChar::LineSeparator);
742 if (layout.text() == text && videoSize == frameSize)
743 return false;
744
745 videoSize = frameSize;
746 QFont font;
747 // 0.045 - based on this https://www.md-subs.com/saa-subtitle-font-size
748 qreal fontSize = frameSize.height() * 0.045;
749 font.setPointSize(fontSize);
750
751 layout.setText(text);
752 if (text.isEmpty()) {
753 bounds = {};
754 return true;
755 }
756 layout.setFont(font);
757 QTextOption option;
758 option.setUseDesignMetrics(true);
759 option.setAlignment(Qt::AlignCenter);
760 layout.setTextOption(option);
761
762 QFontMetrics metrics(font);
763 int leading = metrics.leading();
764
765 qreal lineWidth = videoSize.width()*.9;
766 qreal margin = videoSize.width()*.05;
767 qreal height = 0;
768 qreal textWidth = 0;
769 layout.beginLayout();
770 while (1) {
771 QTextLine line = layout.createLine();
772 if (!line.isValid())
773 break;
774
775 line.setLineWidth(lineWidth);
776 height += leading;
777 line.setPosition(QPointF(margin, height));
778 height += line.height();
779 textWidth = qMax(a: textWidth, b: line.naturalTextWidth());
780 }
781 layout.endLayout();
782
783 // put subtitles vertically in lower part of the video but not stuck to the bottom
784 int bottomMargin = videoSize.height() / 20;
785 qreal y = videoSize.height() - bottomMargin - height;
786 layout.setPosition(QPointF(0, y));
787 textWidth += fontSize/4.;
788
789 bounds = QRectF((videoSize.width() - textWidth)/2., y, textWidth, height);
790 return true;
791}
792
793void SubtitleLayout::draw(QPainter *painter, const QPointF &translate) const
794{
795 painter->save();
796 painter->translate(offset: translate);
797 painter->setCompositionMode(QPainter::CompositionMode_SourceOver);
798
799 QColor bgColor = Qt::black;
800 bgColor.setAlpha(128);
801 painter->setBrush(bgColor);
802 painter->setPen(Qt::NoPen);
803 painter->drawRect(rect: bounds);
804
805 QTextLayout::FormatRange range;
806 range.start = 0;
807 range.length = layout.text().size();
808 range.format.setForeground(Qt::white);
809 layout.draw(p: painter, pos: {}, selections: { range });
810 painter->restore();
811}
812
813QImage SubtitleLayout::toImage() const
814{
815 auto size = bounds.size().toSize();
816 if (size.isEmpty())
817 return QImage();
818 QImage img(size, QImage::Format_RGBA8888_Premultiplied);
819 QColor bgColor = Qt::black;
820 bgColor.setAlpha(128);
821 img.fill(color: bgColor);
822
823 QPainter painter(&img);
824 painter.translate(offset: -bounds.topLeft());
825 QTextLayout::FormatRange range;
826 range.start = 0;
827 range.length = layout.text().size();
828 range.format.setForeground(Qt::white);
829 layout.draw(p: &painter, pos: {}, selections: { range });
830 return img;
831}
832
833}
834
835QT_END_NAMESPACE
836

source code of qtmultimedia/src/multimedia/video/qvideotexturehelper.cpp