// Copyright (C) 2022 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qvideoframeconverter_p.h"
#include "qvideoframeconversionhelper_p.h"
#include "qvideoframeformat.h"

#include <QtCore/qcoreapplication.h>
#include <QtCore/qsize.h>
#include <QtCore/qhash.h>
#include <QtCore/qfile.h>
#include <QtCore/qthreadstorage.h>
#include <QtGui/qimage.h>
#include <QtGui/qoffscreensurface.h>
#include <qpa/qplatformintegration.h>
#include <private/qvideotexturehelper_p.h>
#include <private/qabstractvideobuffer_p.h>
#include <private/qguiapplication_p.h>
#include <rhi/qrhi.h>

#ifdef Q_OS_DARWIN
#include <QtCore/private/qcore_mac_p.h>
#endif

QT_BEGIN_NAMESPACE

static Q_LOGGING_CATEGORY(qLcVideoFrameConverter, "qt.multimedia.video.frameconverter")

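// Per-thread state for the GPU conversion path: a lazily created QRhi (plus a
// fallback surface for the OpenGL backend) and a flag recording that no RHI
// backend could be created, so later conversions go straight to the CPU path.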
namespace {

struct State
{
    QRhi *rhi = nullptr;
#if QT_CONFIG(opengl)
    QOffscreenSurface *fallbackSurface = nullptr;
#endif
    bool cpuOnly = false;
    ~State() {
        delete rhi;
#if QT_CONFIG(opengl)
        delete fallbackSurface;
#endif
    }
};

}

static QThreadStorage<State> g_state;
static QHash<QString, QShader> g_shaderCache;

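// Interleaved (x, y, u, v) vertex data for a triangle-strip quad. There is one
// block of four vertices per 90-degree clockwise rotation; the draw call below
// selects a block through the vertex buffer offset.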
static const float g_quad[] = {
    // Rotation 0 CW
     1.f, -1.f, 1.f, 1.f,
     1.f,  1.f, 1.f, 0.f,
    -1.f, -1.f, 0.f, 1.f,
    -1.f,  1.f, 0.f, 0.f,
    // Rotation 90 CW
     1.f, -1.f, 1.f, 0.f,
     1.f,  1.f, 0.f, 0.f,
    -1.f, -1.f, 1.f, 1.f,
    -1.f,  1.f, 0.f, 1.f,
    // Rotation 180 CW
     1.f, -1.f, 0.f, 0.f,
     1.f,  1.f, 0.f, 1.f,
    -1.f, -1.f, 1.f, 0.f,
    -1.f,  1.f, 1.f, 1.f,
    // Rotation 270 CW
     1.f, -1.f, 0.f, 1.f,
     1.f,  1.f, 1.f, 1.f,
    -1.f, -1.f, 0.f, 0.f,
    -1.f,  1.f, 1.f, 0.f,
};

static bool pixelFormatHasAlpha(QVideoFrameFormat::PixelFormat format)
{
    switch (format) {
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        return true;
    default:
        return false;
    }
}

static QShader vfcGetShader(const QString &name)
{
    QShader shader = g_shaderCache.value(name);
    if (shader.isValid())
        return shader;

    QFile f(name);
    if (f.open(QIODevice::ReadOnly))
        shader = QShader::fromSerialized(f.readAll());

    if (shader.isValid())
        g_shaderCache[name] = shader;

    return shader;
}

static void rasterTransform(QImage &image, QVideoFrame::RotationAngle rotation,
                            bool mirrorX, bool mirrorY)
{
    QTransform t;
    if (mirrorX)
        t.scale(-1.f, 1.f);
    if (rotation != QVideoFrame::Rotation0)
        t.rotate(float(rotation));
    if (mirrorY)
        t.scale(1.f, -1.f);
    if (!t.isIdentity())
        image = image.transformed(t);
}

static void imageCleanupHandler(void *info)
{
    QByteArray *imageData = reinterpret_cast<QByteArray *>(info);
    delete imageData;
}

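// Creates the per-thread QRhi on first use, preferring the same backend as the
// frame's own RHI (Metal on Apple platforms, D3D11 on Windows, otherwise OpenGL
// with a shared context). When no backend can be created, cpuOnly is latched so
// later calls skip straight to CPU conversion.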
static QRhi *initializeRHI(QRhi *videoFrameRhi)
{
    if (g_state.localData().rhi || g_state.localData().cpuOnly)
        return g_state.localData().rhi;

    QRhi::Implementation backend = videoFrameRhi ? videoFrameRhi->backend() : QRhi::Null;

    if (QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::RhiBasedRendering)) {

#if defined(Q_OS_MACOS) || defined(Q_OS_IOS)
        if (backend == QRhi::Metal || backend == QRhi::Null) {
            QRhiMetalInitParams params;
            g_state.localData().rhi = QRhi::create(QRhi::Metal, &params);
        }
#endif

#if defined(Q_OS_WIN)
        if (backend == QRhi::D3D11 || backend == QRhi::Null) {
            QRhiD3D11InitParams params;
            g_state.localData().rhi = QRhi::create(QRhi::D3D11, &params);
        }
#endif

#if QT_CONFIG(opengl)
        if (!g_state.localData().rhi && (backend == QRhi::OpenGLES2 || backend == QRhi::Null)) {
            if (QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::OpenGL)
                && QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::RasterGLSurface)
                && !QCoreApplication::testAttribute(Qt::AA_ForceRasterWidgets)) {

                g_state.localData().fallbackSurface = QRhiGles2InitParams::newFallbackSurface();
                QRhiGles2InitParams params;
                params.fallbackSurface = g_state.localData().fallbackSurface;
                if (backend == QRhi::OpenGLES2)
                    params.shareContext = static_cast<const QRhiGles2NativeHandles *>(videoFrameRhi->nativeHandles())->context;
                g_state.localData().rhi = QRhi::create(QRhi::OpenGLES2, &params);
            }
        }
#endif
    }

    if (!g_state.localData().rhi) {
        g_state.localData().cpuOnly = true;
        qWarning() << Q_FUNC_INFO << ": No RHI backend. Using CPU conversion.";
    }

    return g_state.localData().rhi;
}

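// Sets up the shader resource bindings (the uniform buffer plus one sampled
// texture per plane of the frame's pixel format) and creates a triangle-strip
// graphics pipeline using the vertex and fragment shaders selected for the
// frame's surface format.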
static bool updateTextures(QRhi *rhi,
                           std::unique_ptr<QRhiBuffer> &uniformBuffer,
                           std::unique_ptr<QRhiSampler> &textureSampler,
                           std::unique_ptr<QRhiShaderResourceBindings> &shaderResourceBindings,
                           std::unique_ptr<QRhiGraphicsPipeline> &graphicsPipeline,
                           std::unique_ptr<QRhiRenderPassDescriptor> &renderPass,
                           QVideoFrame &frame,
                           const std::unique_ptr<QVideoFrameTextures> &videoFrameTextures)
{
    auto format = frame.surfaceFormat();
    auto pixelFormat = format.pixelFormat();

    auto textureDesc = QVideoTextureHelper::textureDescription(pixelFormat);

    QRhiShaderResourceBinding bindings[4];
    auto *b = bindings;
    *b++ = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                    uniformBuffer.get());
    for (int i = 0; i < textureDesc->nplanes; ++i)
        *b++ = QRhiShaderResourceBinding::sampledTexture(i + 1, QRhiShaderResourceBinding::FragmentStage,
                                                         videoFrameTextures->texture(i), textureSampler.get());
    shaderResourceBindings->setBindings(bindings, b);
    shaderResourceBindings->create();

    graphicsPipeline.reset(rhi->newGraphicsPipeline());
    graphicsPipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);

    QShader vs = vfcGetShader(QVideoTextureHelper::vertexShaderFileName(format));
    if (!vs.isValid())
        return false;

    QShader fs = vfcGetShader(QVideoTextureHelper::fragmentShaderFileName(format));
    if (!fs.isValid())
        return false;

    graphicsPipeline->setShaderStages({
        { QRhiShaderStage::Vertex, vs },
        { QRhiShaderStage::Fragment, fs }
    });

    QRhiVertexInputLayout inputLayout;
    inputLayout.setBindings({
        { 4 * sizeof(float) }
    });
    inputLayout.setAttributes({
        { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
        { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
    });

    graphicsPipeline->setVertexInputLayout(inputLayout);
    graphicsPipeline->setShaderResourceBindings(shaderResourceBindings.get());
    graphicsPipeline->setRenderPassDescriptor(renderPass.get());
    graphicsPipeline->create();

    return true;
}

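// JPEG frames are not handled by the shader path: the mapped frame data is
// decoded directly with QImage and rotation/mirroring is applied in software.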
static QImage convertJPEG(const QVideoFrame &frame, QVideoFrame::RotationAngle rotation, bool mirrorX, bool mirrorY)
{
    QVideoFrame varFrame = frame;
    if (!varFrame.map(QVideoFrame::ReadOnly)) {
        qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
        return {};
    }
    QImage image;
    image.loadFromData(varFrame.bits(0), varFrame.mappedBytes(0), "JPG");
    varFrame.unmap();
    rasterTransform(image, rotation, mirrorX, mirrorY);
    return image;
}

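// Software fallback: map the frame, convert it to RGB32 (or premultiplied ARGB32
// when the pixel format has an alpha channel) with the per-format converter, and
// apply rotation/mirroring with QTransform.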
static QImage convertCPU(const QVideoFrame &frame, QVideoFrame::RotationAngle rotation, bool mirrorX, bool mirrorY)
{
    VideoFrameConvertFunc convert = qConverterForFormat(frame.pixelFormat());
    if (!convert) {
        qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": unsupported pixel format" << frame.pixelFormat();
        return {};
    } else {
        QVideoFrame varFrame = frame;
        if (!varFrame.map(QVideoFrame::ReadOnly)) {
            qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
            return {};
        }
        auto format = pixelFormatHasAlpha(varFrame.pixelFormat()) ? QImage::Format_ARGB32_Premultiplied : QImage::Format_RGB32;
        QImage image = QImage(varFrame.width(), varFrame.height(), format);
        convert(varFrame, image.bits());
        varFrame.unmap();
        rasterTransform(image, rotation, mirrorX, mirrorY);
        return image;
    }
}

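// Converts a video frame to a QImage. The GPU path renders the frame's textures
// into an RGBA8 offscreen render target via QRhi and reads the result back; the
// CPU path is used when no RHI is available or when any of the GPU steps fails.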
QImage qImageFromVideoFrame(const QVideoFrame &frame, QVideoFrame::RotationAngle rotation, bool mirrorX, bool mirrorY)
{
#ifdef Q_OS_DARWIN
    QMacAutoReleasePool releasePool;
#endif

    if (!g_state.hasLocalData())
        g_state.setLocalData({});

    std::unique_ptr<QRhiRenderPassDescriptor> renderPass;
    std::unique_ptr<QRhiBuffer> vertexBuffer;
    std::unique_ptr<QRhiBuffer> uniformBuffer;
    std::unique_ptr<QRhiTexture> targetTexture;
    std::unique_ptr<QRhiTextureRenderTarget> renderTarget;
    std::unique_ptr<QRhiSampler> textureSampler;
    std::unique_ptr<QRhiShaderResourceBindings> shaderResourceBindings;
    std::unique_ptr<QRhiGraphicsPipeline> graphicsPipeline;

    if (frame.size().isEmpty() || frame.pixelFormat() == QVideoFrameFormat::Format_Invalid)
        return {};

    if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
        return convertJPEG(frame, rotation, mirrorX, mirrorY);

    QRhi *rhi = nullptr;

    if (frame.videoBuffer())
        rhi = frame.videoBuffer()->rhi();

    if (!rhi || rhi->thread() != QThread::currentThread())
        rhi = initializeRHI(rhi);

    if (!rhi || rhi->isRecordingFrame())
        return convertCPU(frame, rotation, mirrorX, mirrorY);

    // Do conversion using shaders

    const int rotationIndex = (rotation / 90) % 4;

    QSize frameSize = frame.size();
    if (rotationIndex % 2)
        frameSize.transpose();

    vertexBuffer.reset(rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_quad)));
    vertexBuffer->create();

    uniformBuffer.reset(rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, 64 + 64 + 4 + 4 + 4 + 4));
    uniformBuffer->create();

    textureSampler.reset(rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
                                         QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
    textureSampler->create();

    shaderResourceBindings.reset(rhi->newShaderResourceBindings());

    targetTexture.reset(rhi->newTexture(QRhiTexture::RGBA8, frameSize, 1, QRhiTexture::RenderTarget));
    if (!targetTexture->create()) {
        qCDebug(qLcVideoFrameConverter) << "Failed to create target texture. Using CPU conversion.";
        return convertCPU(frame, rotation, mirrorX, mirrorY);
    }

    renderTarget.reset(rhi->newTextureRenderTarget({ { targetTexture.get() } }));
    renderPass.reset(renderTarget->newCompatibleRenderPassDescriptor());
    renderTarget->setRenderPassDescriptor(renderPass.get());
    renderTarget->create();

    QRhiCommandBuffer *cb = nullptr;
    QRhi::FrameOpResult r = rhi->beginOffscreenFrame(&cb);
    if (r != QRhi::FrameOpSuccess) {
        qCDebug(qLcVideoFrameConverter) << "Failed to set up offscreen frame. Using CPU conversion.";
        return convertCPU(frame, rotation, mirrorX, mirrorY);
    }

    QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();

    rub->uploadStaticBuffer(vertexBuffer.get(), g_quad);

    QVideoFrame frameTmp = frame;
    auto videoFrameTextures = QVideoTextureHelper::createTextures(frameTmp, rhi, rub, {});
    if (!videoFrameTextures) {
        qCDebug(qLcVideoFrameConverter) << "Failed to obtain textures. Using CPU conversion.";
        return convertCPU(frame, rotation, mirrorX, mirrorY);
    }

    if (!updateTextures(rhi, uniformBuffer, textureSampler, shaderResourceBindings,
                        graphicsPipeline, renderPass, frameTmp, videoFrameTextures)) {
        qCDebug(qLcVideoFrameConverter) << "Failed to update textures. Using CPU conversion.";
        return convertCPU(frame, rotation, mirrorX, mirrorY);
    }

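    // Mirroring is applied as a scale in the uniform transform; the vertical scale
    // is additionally negated for backends where Y points up in the framebuffer,
    // so the read-back image keeps a consistent orientation.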
    float xScale = mirrorX ? -1.0 : 1.0;
    float yScale = mirrorY ? -1.0 : 1.0;

    if (rhi->isYUpInFramebuffer())
        yScale = -yScale;

    QMatrix4x4 transform;
    transform.scale(xScale, yScale);

    QByteArray uniformData(64 + 64 + 4 + 4, Qt::Uninitialized);
    QVideoTextureHelper::updateUniformData(&uniformData, frame.surfaceFormat(), frame, transform, 1.f);
    rub->updateDynamicBuffer(uniformBuffer.get(), 0, uniformData.size(), uniformData.constData());

    cb->beginPass(renderTarget.get(), Qt::black, { 1.0f, 0 }, rub);
    cb->setGraphicsPipeline(graphicsPipeline.get());

    cb->setViewport({ 0, 0, float(frameSize.width()), float(frameSize.height()) });
    cb->setShaderResources(shaderResourceBindings.get());

    quint32 vertexOffset = quint32(sizeof(float)) * 16 * rotationIndex;
    const QRhiCommandBuffer::VertexInput vbufBinding(vertexBuffer.get(), vertexOffset);
    cb->setVertexInput(0, 1, &vbufBinding);
    cb->draw(4);

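    // Queue a readback of the rendered target texture. readCompleted is set from
    // the completion callback; the code expects the readback to have finished by
    // the time endOffscreenFrame() returns and falls back to CPU conversion otherwise.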
    QRhiReadbackDescription readDesc(targetTexture.get());
    QRhiReadbackResult readResult;
    bool readCompleted = false;

    readResult.completed = [&readCompleted] { readCompleted = true; };

    rub = rhi->nextResourceUpdateBatch();
    rub->readBackTexture(readDesc, &readResult);

    cb->endPass(rub);

    rhi->endOffscreenFrame();

    if (!readCompleted) {
        qCDebug(qLcVideoFrameConverter) << "Failed to read back texture. Using CPU conversion.";
        return convertCPU(frame, rotation, mirrorX, mirrorY);
    }

    QByteArray *imageData = new QByteArray(readResult.data);

    return QImage(reinterpret_cast<const uchar *>(imageData->constData()),
                  readResult.pixelSize.width(), readResult.pixelSize.height(),
                  QImage::Format_RGBA8888_Premultiplied, imageCleanupHandler, imageData);
}

QT_END_NAMESPACE