// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qffmpegvideobuffer_p.h"
#include "private/qvideotexturehelper_p.h"
#include "private/qmultimediautils_p.h"
#include "qffmpeghwaccel_p.h"
#include "qloggingcategory.h"
#include <QtCore/qthread.h>

extern "C" {
#include <libavutil/pixdesc.h>
#include <libavutil/hdr_dynamic_metadata.h>
#include <libavutil/mastering_display_metadata.h>
}

QT_BEGIN_NAMESPACE

using namespace QFFmpeg;

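// FFmpeg represents bottom-up frames with negative linesizes, with each data
// pointer addressing the last row of its plane. Such frames cannot be mapped
// directly, so convertSWFrame() copies them into a top-down layout.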
static bool isFrameFlipped(const AVFrame& frame) {
    for (int i = 0; i < AV_NUM_DATA_POINTERS && frame.data[i]; ++i) {
        if (frame.linesize[i] < 0)
            return true;
    }

    return false;
}

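// The buffer takes ownership of a decoded AVFrame. Frames with a
// hw_frames_ctx stay on the GPU until textures are mapped or the data is
// downloaded; software frames are converted up front into a format Qt can
// handle.
//
// A minimal construction sketch (hypothetical caller; assumes `frame` is an
// AVFrameUPtr just filled by avcodec_receive_frame() and `codecContext` is
// the decoder context):
//
//     auto buffer = std::make_unique<QFFmpegVideoBuffer>(
//             std::move(frame), codecContext->sample_aspect_ratio);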
QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio)
    : QHwVideoBuffer(QVideoFrame::NoHandle),
      m_frame(frame.get()),
      m_size(qCalculateFrameSize({ frame->width, frame->height },
                                 { pixelAspectRatio.num, pixelAspectRatio.den }))
{
    if (frame->hw_frames_ctx) {
        m_hwFrame = std::move(frame);
        m_pixelFormat = toQtPixelFormat(HWAccel::format(m_hwFrame.get()));
        return;
    }

    m_swFrame = std::move(frame);
    m_pixelFormat = toQtPixelFormat(AVPixelFormat(m_swFrame->format));

    convertSWFrame();
}

QFFmpegVideoBuffer::~QFFmpegVideoBuffer() = default;

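// Rewrites m_swFrame whenever its pixel format, orientation, or size does not
// match what the buffer advertises. A swscale pass is a full-frame copy, so
// it only runs when one of those properties actually differs.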
void QFFmpegVideoBuffer::convertSWFrame()
{
    Q_ASSERT(m_swFrame);

    const auto actualAVPixelFormat = AVPixelFormat(m_swFrame->format);
    const auto targetAVPixelFormat = toAVPixelFormat(m_pixelFormat);

    const QSize actualSize(m_swFrame->width, m_swFrame->height);
    if (actualAVPixelFormat != targetAVPixelFormat || isFrameFlipped(*m_swFrame)
        || m_size != actualSize) {
        Q_ASSERT(toQtPixelFormat(targetAVPixelFormat) == m_pixelFormat);
        // convert the frame into a format and size we can handle
        SwsContextUPtr scaleContext = createSwsContext(actualSize, actualAVPixelFormat, m_size,
                                                       targetAVPixelFormat, SWS_BICUBIC);

        auto newFrame = makeAVFrame();
        newFrame->width = m_size.width();
        newFrame->height = m_size.height();
        newFrame->format = targetAVPixelFormat;
        av_frame_get_buffer(newFrame.get(), 0);

        sws_scale(scaleContext.get(), m_swFrame->data, m_swFrame->linesize, 0, m_swFrame->height,
                  newFrame->data, newFrame->linesize);
        if (m_frame == m_swFrame.get())
            m_frame = newFrame.get();
        m_swFrame = std::move(newFrame);
    }
}

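// Prepares the texture converter associated with the given QRhi for a
// hardware frame, so that a later mapTextures() call finds it already
// registered in the frame's hardware context.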
void QFFmpegVideoBuffer::initTextureConverter(QRhi &rhi)
{
    if (!m_hwFrame)
        return;

    // the returned reference is intentionally unused here
    ensureTextureConverter(rhi);

    // preliminary; the type is finalized in mapTextures() once conversion has
    // actually been attempted
    m_type = m_hwFrame && TextureConverter::isBackendAvailable(*m_hwFrame)
            ? QVideoFrame::RhiTextureHandle
            : QVideoFrame::NoHandle;
}

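// Returns the TextureConverter mapped to the given QRhi in the frame's
// hardware frame context, creating and registering one on first use.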
QFFmpeg::TextureConverter &QFFmpegVideoBuffer::ensureTextureConverter(QRhi &rhi)
{
    Q_ASSERT(m_hwFrame);

    HwFrameContextData &frameContextData = HwFrameContextData::ensure(*m_hwFrame);
    TextureConverter *converter = frameContextData.textureConverterMapper.get(rhi);

    if (!converter) {
        bool added = false;
        std::tie(converter, added) =
                frameContextData.textureConverterMapper.tryMap(rhi, TextureConverter(rhi));
        // Another thread adding the converter first would be harmless, but it
        // is worth asserting that the mapping succeeded.
        Q_ASSERT(converter && added);
    }

    return *converter;
}

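// Returns the QRhi registered for this hardware frame whose thread is the
// caller's thread, or nullptr (always nullptr for software frames).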
QRhi *QFFmpegVideoBuffer::rhi() const
{
    if (!m_hwFrame)
        return nullptr;

    HwFrameContextData &frameContextData = HwFrameContextData::ensure(*m_hwFrame);
    return frameContextData.textureConverterMapper.findRhi(
            [](QRhi &rhi) { return rhi.thread()->isCurrentThread(); });
}

QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
{
    return fromAvColorSpace(m_frame->colorspace);
}

QVideoFrameFormat::ColorTransfer QFFmpegVideoBuffer::colorTransfer() const
{
    return fromAvColorTransfer(m_frame->color_trc);
}

QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
{
    return fromAvColorRange(m_frame->color_range);
}

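// Returns the maximum luminance of the mastering display in nits, taken from
// the frame's HDR static metadata side data, or -1 when the frame carries
// none.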
float QFFmpegVideoBuffer::maxNits()
{
    float maxNits = -1;
    for (int i = 0; i < m_frame->nb_side_data; ++i) {
        AVFrameSideData *sd = m_frame->side_data[i];
        // TODO: Longer term we might want to also support HDR10+ dynamic metadata
        if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
            auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
            auto maybeLum = QFFmpeg::mul(qreal(10'000.), data->max_luminance);
            if (maybeLum)
                maxNits = float(maybeLum.value());
        }
    }
    return maxNits;
}

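// Maps the frame for CPU access. A hardware frame is first downloaded into a
// software frame via av_hwframe_transfer_data() and converted; mapping for
// writing drops the GPU copy so the CPU data stays authoritative.
//
// A minimal read-back sketch (hypothetical; `buffer` wraps a decoded frame):
//
//     QAbstractVideoBuffer::MapData md = buffer->map(QVideoFrame::ReadOnly);
//     if (md.planeCount > 0)
//         firstPlane = QByteArray((const char *)md.data[0], md.dataSize[0]);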
QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
{
    if (!m_swFrame) {
        Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
        m_swFrame = makeAVFrame();
        /* retrieve data from GPU to CPU */
        int ret = av_hwframe_transfer_data(m_swFrame.get(), m_hwFrame.get(), 0);
        if (ret < 0) {
            qWarning() << "Error transferring the data to system memory:" << ret;
            return {};
        }
        convertSWFrame();
    }

    m_mode = mode;

    MapData mapData;
    auto *desc = QVideoTextureHelper::textureDescription(pixelFormat());
    mapData.planeCount = desc->nplanes;
    for (int i = 0; i < mapData.planeCount; ++i) {
        Q_ASSERT(m_swFrame->linesize[i] >= 0);

        mapData.data[i] = m_swFrame->data[i];
        mapData.bytesPerLine[i] = m_swFrame->linesize[i];
        mapData.dataSize[i] = mapData.bytesPerLine[i] * desc->heightForPlane(m_swFrame->height, i);
    }

    if ((mode & QVideoFrame::WriteOnly) != 0 && m_hwFrame) {
        m_type = QVideoFrame::NoHandle;
        m_hwFrame.reset();
    }

    return mapData;
}

void QFFmpegVideoBuffer::unmap()
{
    // nothing to do here for SW buffers.
    // Set NotMapped mode to ensure map/unmap/mapMode consistency.
    m_mode = QVideoFrame::NotMapped;
}

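// Must be called on the QRhi's thread. Creates (or recycles) RHI textures for
// the hardware frame and records in m_type whether that succeeded.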
QVideoFrameTexturesUPtr QFFmpegVideoBuffer::mapTextures(QRhi &rhi, QVideoFrameTexturesUPtr& oldTextures)
{
    Q_ASSERT(rhi.thread()->isCurrentThread());

    QVideoFrameTexturesUPtr result = createTexturesFromHwFrame(rhi, oldTextures);

    // update m_type according to the real result
    m_type = result ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle;
    return result;
}

QVideoFrameTexturesUPtr QFFmpegVideoBuffer::createTexturesFromHwFrame(QRhi &rhi, QVideoFrameTexturesUPtr& oldTextures) {

    if (!m_hwFrame)
        return {};

    // QTBUG-132200:
    // We aim to set initTextureConverterForAnyRhi=true for as many platforms as we can,
    // and remove the check once all platforms work fine on CI. If the flag is enabled,
    // QVideoFrame::toImage can work faster, and we can test hw texture conversion on CI.
    // Currently, enabling the flag fails on some CI platforms.
    constexpr bool initTextureConverterForAnyRhi = false;

    TextureConverter *converter = initTextureConverterForAnyRhi
            ? &ensureTextureConverter(rhi)
            : HwFrameContextData::ensure(*m_hwFrame).textureConverterMapper.get(rhi);

    if (!converter)
        return {};

    if (!converter->init(*m_hwFrame))
        return {};

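    // Fast path: the converter builds ready-to-use textures, possibly
    // recycling the previous frame's textures.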
    const QVideoFrameTextures *oldTexturesRaw = oldTextures.get();
    if (QVideoFrameTexturesUPtr newTextures = converter->createTextures(*m_hwFrame, oldTextures))
        return newTextures;

    Q_ASSERT(oldTextures.get() == oldTexturesRaw);

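    // Fallback: obtain native texture handles from the converter and wrap
    // them into RHI textures ourselves.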
    QVideoFrameTexturesHandlesUPtr oldTextureHandles =
            oldTextures ? oldTextures->takeHandles() : nullptr;
    QVideoFrameTexturesHandlesUPtr newTextureHandles =
            converter->createTextureHandles(*m_hwFrame, std::move(oldTextureHandles));

    if (newTextureHandles) {
        QVideoFrameTexturesUPtr newTextures = QVideoTextureHelper::createTexturesFromHandles(
                std::move(newTextureHandles), rhi, m_pixelFormat,
                { m_hwFrame->width, m_hwFrame->height });

        return newTextures;
    }

    static thread_local int lastFormat = 0;
    if (std::exchange(lastFormat, m_hwFrame->format) != m_hwFrame->format) // prevent logging spam
        qWarning() << "failed to get textures for frame; format:" << m_hwFrame->format;

    return {};
}

QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::pixelFormat() const
{
    return m_pixelFormat;
}

QSize QFFmpegVideoBuffer::size() const
{
    return m_size;
}

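// Maps an AVPixelFormat onto the closest QVideoFrameFormat::PixelFormat. For
// formats without a direct equivalent, *needsConversion is set to true and a
// generic RGB or YUV target is picked from the format descriptor.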
QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
{
    if (needsConversion)
        *needsConversion = false;

    switch (avPixelFormat) {
    default:
        break;
    case AV_PIX_FMT_NONE:
        Q_ASSERT(!"Invalid avPixelFormat!");
        return QVideoFrameFormat::Format_Invalid;
    case AV_PIX_FMT_ARGB:
        return QVideoFrameFormat::Format_ARGB8888;
    case AV_PIX_FMT_0RGB:
        return QVideoFrameFormat::Format_XRGB8888;
    case AV_PIX_FMT_BGRA:
        return QVideoFrameFormat::Format_BGRA8888;
    case AV_PIX_FMT_BGR0:
        return QVideoFrameFormat::Format_BGRX8888;
    case AV_PIX_FMT_ABGR:
        return QVideoFrameFormat::Format_ABGR8888;
    case AV_PIX_FMT_0BGR:
        return QVideoFrameFormat::Format_XBGR8888;
    case AV_PIX_FMT_RGBA:
        return QVideoFrameFormat::Format_RGBA8888;
    case AV_PIX_FMT_RGB0:
        return QVideoFrameFormat::Format_RGBX8888;

    case AV_PIX_FMT_YUV422P:
        return QVideoFrameFormat::Format_YUV422P;
    case AV_PIX_FMT_YUV420P:
        return QVideoFrameFormat::Format_YUV420P;
    case AV_PIX_FMT_YUV420P10:
        return QVideoFrameFormat::Format_YUV420P10;
    case AV_PIX_FMT_UYVY422:
        return QVideoFrameFormat::Format_UYVY;
    case AV_PIX_FMT_YUYV422:
        return QVideoFrameFormat::Format_YUYV;
    case AV_PIX_FMT_NV12:
        return QVideoFrameFormat::Format_NV12;
    case AV_PIX_FMT_NV21:
        return QVideoFrameFormat::Format_NV21;
    case AV_PIX_FMT_GRAY8:
        return QVideoFrameFormat::Format_Y8;
    case AV_PIX_FMT_GRAY16:
        return QVideoFrameFormat::Format_Y16;

    case AV_PIX_FMT_P010:
        return QVideoFrameFormat::Format_P010;
    case AV_PIX_FMT_P016:
        return QVideoFrameFormat::Format_P016;
    case AV_PIX_FMT_MEDIACODEC:
        return QVideoFrameFormat::Format_SamplerExternalOES;
    }

    if (needsConversion)
        *needsConversion = true;

    const AVPixFmtDescriptor *descriptor = av_pix_fmt_desc_get(avPixelFormat);

    if (descriptor->flags & AV_PIX_FMT_FLAG_RGB)
        return QVideoFrameFormat::Format_RGBA8888;

    if (descriptor->comp[0].depth > 8)
        return QVideoFrameFormat::Format_P016;
    return QVideoFrameFormat::Format_YUV420P;
}

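// The inverse mapping: selects the AVPixelFormat to use when converting or
// encoding a QVideoFrame. Formats without an FFmpeg equivalent map to
// AV_PIX_FMT_NONE.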
AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
{
    switch (pixelFormat) {
    default:
    case QVideoFrameFormat::Format_Invalid:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
    case QVideoFrameFormat::Format_IMC2:
    case QVideoFrameFormat::Format_IMC3:
    case QVideoFrameFormat::Format_IMC4:
        return AV_PIX_FMT_NONE;
    case QVideoFrameFormat::Format_Jpeg:
        // We're using the data from the converted QImage here, which is in BGRA.
        return AV_PIX_FMT_BGRA;
    case QVideoFrameFormat::Format_ARGB8888:
        return AV_PIX_FMT_ARGB;
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        return AV_PIX_FMT_0RGB;
    case QVideoFrameFormat::Format_BGRA8888:
        return AV_PIX_FMT_BGRA;
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        return AV_PIX_FMT_BGR0;
    case QVideoFrameFormat::Format_ABGR8888:
        return AV_PIX_FMT_ABGR;
    case QVideoFrameFormat::Format_XBGR8888:
        return AV_PIX_FMT_0BGR;
    case QVideoFrameFormat::Format_RGBA8888:
        return AV_PIX_FMT_RGBA;
    // to be added in 6.8:
    // case QVideoFrameFormat::Format_RGBA8888_Premultiplied:
    case QVideoFrameFormat::Format_RGBX8888:
        return AV_PIX_FMT_RGB0;

    case QVideoFrameFormat::Format_YUV422P:
        return AV_PIX_FMT_YUV422P;
    case QVideoFrameFormat::Format_YUV420P:
        return AV_PIX_FMT_YUV420P;
    case QVideoFrameFormat::Format_YUV420P10:
        return AV_PIX_FMT_YUV420P10;
    case QVideoFrameFormat::Format_UYVY:
        return AV_PIX_FMT_UYVY422;
    case QVideoFrameFormat::Format_YUYV:
        return AV_PIX_FMT_YUYV422;
    case QVideoFrameFormat::Format_NV12:
        return AV_PIX_FMT_NV12;
    case QVideoFrameFormat::Format_NV21:
        return AV_PIX_FMT_NV21;
    case QVideoFrameFormat::Format_Y8:
        return AV_PIX_FMT_GRAY8;
    case QVideoFrameFormat::Format_Y16:
        return AV_PIX_FMT_GRAY16;

    case QVideoFrameFormat::Format_P010:
        return AV_PIX_FMT_P010;
    case QVideoFrameFormat::Format_P016:
        return AV_PIX_FMT_P016;

    case QVideoFrameFormat::Format_SamplerExternalOES:
        return AV_PIX_FMT_MEDIACODEC;
    }
}

QT_END_NAMESPACE
