// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qabstractvideobuffer.h"

#include "qvideotexturehelper_p.h"
#include "qvideoframeconverter_p.h"
#include "qvideoframe_p.h"
#include "private/qmultimediautils_p.h"

#include <qpainter.h>
#include <qloggingcategory.h>

QT_BEGIN_NAMESPACE

namespace QVideoTextureHelper
{

static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] = {
    // Format_Invalid
    { 0, 0,
      [](int, int) { return 0; },
      { QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ARGB8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ARGB8888_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_XRGB8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRA8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::BGRA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRA8888_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::BGRA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRX8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::BGRA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ABGR8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_XBGR8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_RGBA8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_RGBX8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_AYUV
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_AYUV_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUV420P
    { 3, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_YUV422P
    { 3, 1,
      [](int stride, int height) { return stride * height * 2; },
      { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      { { 1, 1 }, { 2, 1 }, { 2, 1 } }
    },
    // Format_YV12
    { 3, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_UYVY
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 2, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUYV
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 2, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_NV12
    { 2, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { QRhiTexture::R8, QRhiTexture::RG8, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_NV21
    { 2, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { QRhiTexture::R8, QRhiTexture::RG8, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_IMC1
    { 3, 1,
      [](int stride, int height) {
          // IMC1 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_IMC2
    { 2, 1,
      [](int stride, int height) { return 2*stride*height; },
      { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 2 }, { 1, 1 } }
    },
    // Format_IMC3
    { 3, 1,
      [](int stride, int height) {
          // IMC3 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_IMC4
    { 2, 1,
      [](int stride, int height) { return 2*stride*height; },
      { QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 2 }, { 1, 1 } }
    },
    // Format_Y8
    { 1, 1,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::R8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_Y16
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::R16, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_P010
    { 2, 2,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { QRhiTexture::R16, QRhiTexture::RG16, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_P016
    { 2, 2,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { QRhiTexture::R16, QRhiTexture::RG16, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_SamplerExternalOES
    { 1, 0,
      [](int, int) { return 0; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_Jpeg
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { QRhiTexture::RGBA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_SamplerRect
    { 1, 0,
      [](int, int) { return 0; },
      { QRhiTexture::BGRA8, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUV420P10
    { 3, 2,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { QRhiTexture::R16, QRhiTexture::R16, QRhiTexture::R16 },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
};
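
// Worked example (added for clarity, not from the original source): for a
// 1920x1080 Format_YUV420P frame with a 1920-byte stride, bytesRequired gives
// 1920 * ((1080 * 3 / 2 + 1) & ~1) = 1920 * 1620 = 3110400 bytes, and sizeScale
// maps the planes to one 1920x1080 (Y) plus two 960x540 (U, V) textures.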

const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format)
{
    return descriptions + format;
}

QString vertexShaderFileName(const QVideoFrameFormat &format)
{
    auto fmt = format.pixelFormat();
    Q_UNUSED(fmt);

#if 1//def Q_OS_ANDROID
    if (fmt == QVideoFrameFormat::Format_SamplerExternalOES)
        return QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb");
#endif
#if 1//def Q_OS_MACOS
    if (fmt == QVideoFrameFormat::Format_SamplerRect)
        return QStringLiteral(":/qt-project.org/multimedia/shaders/rectsampler.vert.qsb");
#endif

    return QStringLiteral(":/qt-project.org/multimedia/shaders/vertex.vert.qsb");
}

QString fragmentShaderFileName(const QVideoFrameFormat &format, QRhiSwapChain::Format surfaceFormat)
{
    const char *shader = nullptr;
    switch (format.pixelFormat()) {
    case QVideoFrameFormat::Format_Y8:
    case QVideoFrameFormat::Format_Y16:
        shader = "y";
        break;
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        shader = "ayuv";
        break;
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        shader = "argb";
        break;
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
        shader = "abgr";
        break;
    case QVideoFrameFormat::Format_Jpeg: // Jpeg is decoded transparently into an ARGB texture
        shader = "bgra";
        break;
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_RGBX8888:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        shader = "rgba";
        break;
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_IMC3:
        shader = "yuv_triplanar";
        break;
    case QVideoFrameFormat::Format_YUV420P10:
        shader = "yuv_triplanar_p10";
        break;
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
        shader = "yvu_triplanar";
        break;
    case QVideoFrameFormat::Format_IMC2:
        shader = "imc2";
        break;
    case QVideoFrameFormat::Format_IMC4:
        shader = "imc4";
        break;
    case QVideoFrameFormat::Format_UYVY:
        shader = "uyvy";
        break;
    case QVideoFrameFormat::Format_YUYV:
        shader = "yuyv";
        break;
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
        // P010/P016 have the same layout as NV12, just 16 instead of 8 bits per pixel
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_ST2084) {
            shader = "nv12_bt2020_pq";
            break;
        }
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_STD_B67) {
            shader = "nv12_bt2020_hlg";
            break;
        }
        // Fall through, should be bt709
        Q_FALLTHROUGH();
    case QVideoFrameFormat::Format_NV12:
        shader = "nv12";
        break;
    case QVideoFrameFormat::Format_NV21:
        shader = "nv21";
        break;
    case QVideoFrameFormat::Format_SamplerExternalOES:
#if 1//def Q_OS_ANDROID
        shader = "externalsampler";
        break;
#endif
    case QVideoFrameFormat::Format_SamplerRect:
#if 1//def Q_OS_MACOS
        shader = "rectsampler_bgra";
        break;
#endif
        // fallthrough
    case QVideoFrameFormat::Format_Invalid:
    default:
        break;
    }
    if (!shader)
        return QString();
    QString shaderFile = QStringLiteral(":/qt-project.org/multimedia/shaders/") + QString::fromLatin1(shader);
    if (surfaceFormat == QRhiSwapChain::HDRExtendedSrgbLinear)
        shaderFile += QLatin1String("_linear");
    shaderFile += QStringLiteral(".frag.qsb");
    return shaderFile;
}
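
// Example (derived from the mapping above): a Format_NV12 frame rendered to an
// HDRExtendedSrgbLinear swapchain resolves to
// ":/qt-project.org/multimedia/shaders/nv12_linear.frag.qsb".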

// Matrices are calculated from
// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.601-7-201103-I!!PDF-E.pdf
// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.709-6-201506-I!!PDF-E.pdf
// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf
//
// For BT2020, we also need to convert the Rec2020 RGB colorspace to sRGB; see
// shaders/colorconvert.glsl for details.
//
// Doing the math gives the following (Y, U & V normalized to the [0..1] range):
//
// Y = a*R + b*G + c*B
// R = Y + e*V
// G = Y - c*d/b*U - a*e/b*V
// B = Y + d*U

// BT2020:
// a = 0.2627, b = 0.6780, c = 0.0593
// d = 1.8814
// e = 1.4746
//
// BT709:
// a = 0.2126, b = 0.7152, c = 0.0722
// d = 1.8556
// e = 1.5748
//
// BT601:
// a = 0.299, b = 0.587, c = 0.114
// d = 1.42
// e = 1.772
//

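// Worked check (added for clarity; not in the original source): for BT709 full
// range, the G-row entries of the matrix below follow directly from the
// constants above:
//   c*d/b = 0.0722 * 1.8556 / 0.7152 ≈ 0.187324
//   a*e/b = 0.2126 * 1.5748 / 0.7152 ≈ 0.468124
// The constant column re-centers U and V around 128/255, e.g. the R offset is
// -e * 128/255 = -1.5748 * 128/255 ≈ -0.790488.
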
// clang-format off
static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
{
    auto colorSpace = format.colorSpace();
    if (colorSpace == QVideoFrameFormat::ColorSpace_Undefined) {
        if (format.frameHeight() > 576)
            // HD video, assume BT709
            colorSpace = QVideoFrameFormat::ColorSpace_BT709;
        else
            // SD video, assume BT601
            colorSpace = QVideoFrameFormat::ColorSpace_BT601;
    }
    switch (colorSpace) {
    case QVideoFrameFormat::ColorSpace_AdobeRgb:
        return {
            1.0f,  0.000f,  1.402f, -0.701f,
            1.0f, -0.344f, -0.714f,  0.529f,
            1.0f,  1.772f,  0.000f, -0.886f,
            0.0f,  0.000f,  0.000f,  1.000f
        };
    default:
    case QVideoFrameFormat::ColorSpace_BT709:
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.0f,  0.0f,       1.5748f,   -0.790488f,
                1.0f, -0.187324f, -0.468124f,  0.329010f,
                1.0f,  1.855600f,  0.0f,      -0.931439f,
                0.0f,  0.0f,       0.0f,       1.0f
            };
        return {
            1.1644f,  0.0000f,  1.7927f, -0.9729f,
            1.1644f, -0.2132f, -0.5329f,  0.3015f,
            1.1644f,  2.1124f,  0.0000f, -1.1334f,
            0.0000f,  0.0000f,  0.0000f,  1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT2020:
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.0000f,  1.4746f, -0.7402f,
                1.f, -0.1646f, -0.5714f,  0.3694f,
                1.f,  1.8814f,  0.000f,  -0.9445f,
                0.0f, 0.0000f,  0.000f,   1.0000f
            };
        return {
            1.1644f,  0.000f,   1.6787f, -0.9157f,
            1.1644f, -0.1874f, -0.6504f,  0.3475f,
            1.1644f,  2.1418f,  0.0000f, -1.1483f,
            0.0000f,  0.0000f,  0.0000f,  1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT601:
        // Corresponds to the primaries used by NTSC BT601. For PAL BT601, we use the BT709
        // conversion as those are very close.
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.000f,    1.772f,   -0.886f,
                1.f, -0.1646f,  -0.57135f,  0.36795f,
                1.f,  1.42f,     0.000f,   -0.71f,
                0.0f, 0.000f,    0.000f,    1.0000f
            };
        return {
            1.164f,  0.000f,  1.596f, -0.8708f,
            1.164f, -0.392f, -0.813f,  0.5296f,
            1.164f,  2.017f,  0.000f, -1.0810f,
            0.000f,  0.000f,  0.000f,  1.0000f
        };
    }
}
// clang-format on

// PQ transfer function, see also https://en.wikipedia.org/wiki/Perceptual_quantizer
// or https://ieeexplore.ieee.org/document/7291452
static float convertPQFromLinear(float sig)
{
    const float m1 = 1305.f/8192.f;
    const float m2 = 2523.f/32.f;
    const float c1 = 107.f/128.f;
    const float c2 = 2413.f/128.f;
    const float c3 = 2392.f/128.f;

    const float SDR_LEVEL = 100.f;
    sig *= SDR_LEVEL/10000.f;
    float psig = powf(sig, m1);
    float num = c1 + c2*psig;
    float den = 1 + c3*psig;
    return powf(num/den, m2);
}
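
// Spot check (hand-computed, not from the source): convertPQFromLinear(1.f),
// i.e. SDR white at the 100-nit SDR_LEVEL above, yields ≈ 0.508, matching the
// published PQ code value for 100 nits.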

float convertHLGFromLinear(float sig)
{
    const float a = 0.17883277f;
    const float b = 0.28466892f; // = 1 - 4a
    const float c = 0.55991073f; // = 0.5 - a ln(4a)

    if (sig < 1.f/12.f)
        return sqrtf(3.f*sig);
    return a*logf(12.f*sig - b) + c;
}
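
// Continuity check (hand-computed, not from the source): at sig = 1/12 both
// branches meet at 0.5: sqrtf(3.f/12.f) = 0.5 and a*logf(1 - b) + c ≈ 0.5.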

static float convertSDRFromLinear(float sig)
{
    return sig;
}

void updateUniformData(QByteArray *dst, const QVideoFrameFormat &format, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity, float maxNits)
{
#ifndef Q_OS_ANDROID
    Q_UNUSED(frame);
#endif

    QMatrix4x4 cmat;
    switch (format.pixelFormat()) {
    case QVideoFrameFormat::Format_Invalid:
        return;

    case QVideoFrameFormat::Format_Jpeg:
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_RGBX8888:

    case QVideoFrameFormat::Format_Y8:
    case QVideoFrameFormat::Format_Y16:
        break;
    case QVideoFrameFormat::Format_IMC1:
    case QVideoFrameFormat::Format_IMC2:
    case QVideoFrameFormat::Format_IMC3:
    case QVideoFrameFormat::Format_IMC4:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV420P10:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_UYVY:
    case QVideoFrameFormat::Format_YUYV:
    case QVideoFrameFormat::Format_NV12:
    case QVideoFrameFormat::Format_NV21:
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
        cmat = colorMatrix(format);
        break;
    case QVideoFrameFormat::Format_SamplerExternalOES:
        // get Android specific transform for the externalsampler texture
        if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
            cmat = hwBuffer->externalTextureMatrix();
        break;
    case QVideoFrameFormat::Format_SamplerRect:
    {
        // Similarly to SamplerExternalOES, the "color matrix" is used here to
        // transform the texture coordinates. OpenGL texture rectangles expect
        // non-normalized UVs, so apply a scale to have the fragment shader see
        // UVs in range [width,height] instead of [0,1].
        const QSize videoSize = frame.size();
        cmat.scale(videoSize.width(), videoSize.height());
    }
    break;
    }

    // HDR content with a PQ or HLG transfer function uses a BT2390-based tone mapping
    // to cut off the HDR peaks. This requires passing the maximum luminance the tone
    // mapper should clip to on to the fragment shader. To reduce the computations
    // there, it is precomputed in PQ values here.
    auto fromLinear = convertSDRFromLinear;
    switch (format.colorTransfer()) {
    case QVideoFrameFormat::ColorTransfer_ST2084:
        fromLinear = convertPQFromLinear;
        break;
    case QVideoFrameFormat::ColorTransfer_STD_B67:
        fromLinear = convertHLGFromLinear;
        break;
    default:
        break;
    }
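
    // For example (hand-computed, not from the source): with maxNits == 400 and
    // PQ content, ud->maxLum below becomes convertPQFromLinear(4.f) ≈ 0.65.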

    if (dst->size() < qsizetype(sizeof(UniformData)))
        dst->resize(sizeof(UniformData));

    auto ud = reinterpret_cast<UniformData*>(dst->data());
    memcpy(ud->transformMatrix, transform.constData(), sizeof(ud->transformMatrix));
    memcpy(ud->colorMatrix, cmat.constData(), sizeof(ud->colorMatrix));
    ud->opacity = opacity;
    ud->width = float(format.frameWidth());
    ud->masteringWhite = fromLinear(float(format.maxLuminance())/100.f);
    ud->maxLum = fromLinear(float(maxNits)/100.f);
}

enum class UpdateTextureWithMapResult : uint8_t {
    Failed,
    UpdatedWithDataCopy,
    UpdatedWithDataReference
};

static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi *rhi,
                                                       QRhiResourceUpdateBatch *rub, int plane,
                                                       std::unique_ptr<QRhiTexture> &tex)
{
    Q_ASSERT(frame.isMapped());

    QVideoFrameFormat fmt = frame.surfaceFormat();
    QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
    QSize size = fmt.frameSize();

    const TextureDescription &texDesc = descriptions[pixelFormat];
    QSize planeSize(size.width()/texDesc.sizeScale[plane].x, size.height()/texDesc.sizeScale[plane].y);

    bool needsRebuild = !tex || tex->pixelSize() != planeSize || tex->format() != texDesc.textureFormat[plane];
    if (!tex) {
        tex.reset(rhi->newTexture(texDesc.textureFormat[plane], planeSize, 1, {}));
        if (!tex) {
            qWarning("Failed to create new texture (size %dx%d)", planeSize.width(), planeSize.height());
            return UpdateTextureWithMapResult::Failed;
        }
    }

    if (needsRebuild) {
        tex->setFormat(texDesc.textureFormat[plane]);
        tex->setPixelSize(planeSize);
        if (!tex->create()) {
            qWarning("Failed to create texture (size %dx%d)", planeSize.width(), planeSize.height());
            return UpdateTextureWithMapResult::Failed;
        }
    }

    auto result = UpdateTextureWithMapResult::UpdatedWithDataCopy;

    QRhiTextureSubresourceUploadDescription subresDesc;

    if (pixelFormat == QVideoFrameFormat::Format_Jpeg) {
        Q_ASSERT(plane == 0);

        QImage image;

        // Calling QVideoFrame::toImage is not accurate here; to be fixed.
        // The frame transformation will be considered later.
        const QVideoFrameFormat surfaceFormat = frame.surfaceFormat();

        const bool hasSurfaceTransform = surfaceFormat.isMirrored()
                || surfaceFormat.scanLineDirection() == QVideoFrameFormat::BottomToTop
                || surfaceFormat.rotation() != QtVideo::Rotation::None;

        if (hasSurfaceTransform)
            image = qImageFromVideoFrame(frame, VideoTransformation{});
        else
            image = frame.toImage(); // use the frame cache, no surface transforms applied

        image.convertTo(QImage::Format_ARGB32);
        subresDesc.setImage(image);

    } else {
        // Note: QByteArray::fromRawData creates a QByteArray view without copying the data
        subresDesc.setData(QByteArray::fromRawData(
                reinterpret_cast<const char *>(frame.bits(plane)), frame.mappedBytes(plane)));
        subresDesc.setDataStride(frame.bytesPerLine(plane));
        result = UpdateTextureWithMapResult::UpdatedWithDataReference;
    }

    QRhiTextureUploadEntry entry(0, 0, subresDesc);
    QRhiTextureUploadDescription desc({ entry });
    rub->uploadTexture(tex.get(), desc);

    return result;
}

static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &frame, QRhi *rhi, int plane)
{
    QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame);
    Q_ASSERT(hwBuffer);

    QVideoFrameFormat fmt = frame.surfaceFormat();
    QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
    QSize size = fmt.frameSize();

    const TextureDescription &texDesc = descriptions[pixelFormat];
    QSize planeSize(size.width()/texDesc.sizeScale[plane].x, size.height()/texDesc.sizeScale[plane].y);

    QRhiTexture::Flags textureFlags = {};
    if (pixelFormat == QVideoFrameFormat::Format_SamplerExternalOES) {
#ifdef Q_OS_ANDROID
        if (rhi->backend() == QRhi::OpenGLES2)
            textureFlags |= QRhiTexture::ExternalOES;
#endif
    }
    if (pixelFormat == QVideoFrameFormat::Format_SamplerRect) {
#ifdef Q_OS_MACOS
        if (rhi->backend() == QRhi::OpenGLES2)
            textureFlags |= QRhiTexture::TextureRectangleGL;
#endif
    }

    if (quint64 handle = hwBuffer->textureHandle(rhi, plane); handle) {
        std::unique_ptr<QRhiTexture> tex(rhi->newTexture(texDesc.textureFormat[plane], planeSize, 1, textureFlags));
        if (tex->createFrom({ handle, 0 }))
            return tex;

        qWarning("Failed to initialize QRhiTexture wrapper for native texture object %llu", handle);
    }
    return {};
}

class QVideoFrameTexturesArray : public QVideoFrameTextures
{
public:
    using TextureArray = std::array<std::unique_ptr<QRhiTexture>, TextureDescription::maxPlanes>;
    QVideoFrameTexturesArray(TextureArray &&textures, QVideoFrame mappedFrame = {})
        : m_textures(std::move(textures)), m_mappedFrame(std::move(mappedFrame))
    {
        Q_ASSERT(!m_mappedFrame.isValid() || m_mappedFrame.isReadable());
    }

    // We keep the source frame mapped during the target texture's lifetime.
    // Alternatively, we could set a custom image on
    // QRhiTextureSubresourceUploadDescription using videoFramePlaneAsImage;
    // however, the OpenGL rendering pipeline in QRhi may keep the QImage, and
    // consequently the mapped QVideoFrame, alive even after the target texture
    // is deleted: QTBUG-123174.
    ~QVideoFrameTexturesArray() { m_mappedFrame.unmap(); }

    QRhiTexture *texture(uint plane) const override
    {
        return plane < std::size(m_textures) ? m_textures[plane].get() : nullptr;
    }

    TextureArray takeTextures() { return std::move(m_textures); }

private:
    TextureArray m_textures;
    QVideoFrame m_mappedFrame;
};

static std::unique_ptr<QVideoFrameTextures> createTexturesFromHandles(const QVideoFrame &frame, QRhi *rhi)
{
    const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];
    bool ok = true;
    QVideoFrameTexturesArray::TextureArray textures;
    for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
        textures[plane] = QVideoTextureHelper::createTextureFromHandle(frame, rhi, plane);
        ok &= bool(textures[plane]);
    }
    if (ok)
        return std::make_unique<QVideoFrameTexturesArray>(std::move(textures));
    else
        return {};
}

static std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, QVideoFrameTextures *old)
{
    const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];
    QVideoFrameTexturesArray::TextureArray textures;
    auto oldArray = dynamic_cast<QVideoFrameTexturesArray *>(old);
    if (oldArray)
        textures = oldArray->takeTextures();

    if (!frame.map(QVideoFrame::ReadOnly)) {
        qWarning() << "Cannot map a video frame in ReadOnly mode!";
        return {};
    }

    auto unmapFrameGuard = qScopeGuard([&frame] { frame.unmap(); });

    bool shouldKeepMapping = false;
    for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
        const auto result = updateTextureWithMap(frame, rhi, rub, plane, textures[plane]);
        if (result == UpdateTextureWithMapResult::Failed)
            return {};

        if (result == UpdateTextureWithMapResult::UpdatedWithDataReference)
            shouldKeepMapping = true;
    }

    // Since QVideoFrame::unmap does nothing on a null frame, we can just move
    // the frame into the result.
    return std::make_unique<QVideoFrameTexturesArray>(
            std::move(textures), shouldKeepMapping ? std::move(frame) : QVideoFrame());
}

std::unique_ptr<QVideoFrameTextures> createTextures(QVideoFrame &frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, std::unique_ptr<QVideoFrameTextures> &&oldTextures)
{
    if (!frame.isValid())
        return {};

    if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
        if (auto textures = hwBuffer->mapTextures(rhi))
            return textures;

        if (auto textures = createTexturesFromHandles(frame, rhi))
            return textures;
    }

    return createTexturesFromMemory(frame, rhi, rub, oldTextures.get());
}
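
// A minimal usage sketch (illustrative only; `frame`, `rhi` and `oldTextures`
// are assumed to come from the caller's rendering code, they are not part of
// this file):
//
//     QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();
//     auto textures = QVideoTextureHelper::createTextures(frame, rhi, rub, std::move(oldTextures));
//     if (textures) {
//         // bind textures->texture(plane) for each plane as sampled textures
//         // in the material's QRhiShaderResourceBindings
//     }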

bool SubtitleLayout::update(const QSize &frameSize, QString text)
{
    text.replace(QLatin1Char('\n'), QChar::LineSeparator);
    if (layout.text() == text && videoSize == frameSize)
        return false;

    videoSize = frameSize;
    QFont font;
    // The 0.045 factor is based on https://www.md-subs.com/saa-subtitle-font-size
    qreal fontSize = frameSize.height() * 0.045;
    font.setPointSize(fontSize);

    layout.setText(text);
    if (text.isEmpty()) {
        bounds = {};
        return true;
    }
    layout.setFont(font);
    QTextOption option;
    option.setUseDesignMetrics(true);
    option.setAlignment(Qt::AlignCenter);
    layout.setTextOption(option);

    QFontMetrics metrics(font);
    int leading = metrics.leading();

    qreal lineWidth = videoSize.width()*.9;
    qreal margin = videoSize.width()*.05;
    qreal height = 0;
    qreal textWidth = 0;
    layout.beginLayout();
    while (1) {
        QTextLine line = layout.createLine();
        if (!line.isValid())
            break;

        line.setLineWidth(lineWidth);
        height += leading;
        line.setPosition(QPointF(margin, height));
        height += line.height();
        textWidth = qMax(textWidth, line.naturalTextWidth());
    }
    layout.endLayout();

    // Position subtitles vertically in the lower part of the video, but not
    // flush with the bottom.
    int bottomMargin = videoSize.height() / 20;
    qreal y = videoSize.height() - bottomMargin - height;
    layout.setPosition(QPointF(0, y));
    textWidth += fontSize/4.;

    bounds = QRectF((videoSize.width() - textWidth)/2., y, textWidth, height);
    return true;
}
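
// For example (hand-computed, not from the source): a 1920x1080 frame yields a
// font size of 1080 * 0.045 = 48.6 points, a 1728-pixel line width, and a
// 54-pixel bottom margin.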

void SubtitleLayout::draw(QPainter *painter, const QPointF &translate) const
{
    painter->save();
    painter->translate(translate);
    painter->setCompositionMode(QPainter::CompositionMode_SourceOver);

    QColor bgColor = Qt::black;
    bgColor.setAlpha(128);
    painter->setBrush(bgColor);
    painter->setPen(Qt::NoPen);
    painter->drawRect(bounds);

    QTextLayout::FormatRange range;
    range.start = 0;
    range.length = layout.text().size();
    range.format.setForeground(Qt::white);
    layout.draw(painter, {}, { range });
    painter->restore();
}

QImage SubtitleLayout::toImage() const
{
    auto size = bounds.size().toSize();
    if (size.isEmpty())
        return QImage();
    QImage img(size, QImage::Format_RGBA8888_Premultiplied);
    QColor bgColor = Qt::black;
    bgColor.setAlpha(128);
    img.fill(bgColor);

    QPainter painter(&img);
    painter.translate(-bounds.topLeft());
    QTextLayout::FormatRange range;
    range.start = 0;
    range.length = layout.text().size();
    range.format.setForeground(Qt::white);
    layout.draw(&painter, {}, { range });
    return img;
}

}

QT_END_NAMESPACE