// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qgstvideobuffer_p.h"
#include "qgstreamervideosink_p.h"
#include <private/qvideotexturehelper_p.h>
#include <qpa/qplatformnativeinterface.h>
#include <qguiapplication.h>

#include <gst/video/video.h>
#include <gst/video/video-frame.h>
#include <gst/video/gstvideometa.h>
#include <gst/pbutils/gstpluginsbaseversion.h>

#include <common/qgstutils_p.h>

#if QT_CONFIG(gstreamer_gl)
#  include <QtGui/rhi/qrhi.h>
#  include <QtGui/qopenglcontext.h>
#  include <QtGui/qopenglfunctions.h>
#  include <QtGui/qopengl.h>

#  include <gst/gl/gstglconfig.h>
#  include <gst/gl/gstglmemory.h>
#  include <gst/gl/gstglsyncmeta.h>

#  if QT_CONFIG(gstreamer_gl_egl)
#    include <EGL/egl.h>
#    include <EGL/eglext.h>
#  endif

#  if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
#    include <gst/allocators/gstdmabuf.h>
#  endif
#endif

QT_BEGIN_NAMESPACE

// Keep things building without drm_fourcc.h
#define fourcc_code(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | \
                                 ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))

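// A DRM fourcc packs four ASCII characters little-endian into a 32-bit value.
// For example, fourcc_code('R', 'A', '2', '4') is
// 0x52 | (0x41 << 8) | (0x32 << 16) | (0x34 << 24) == 0x34324152,
// matching DRM_FORMAT_RGBA8888 in drm_fourcc.h.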
#define DRM_FORMAT_RGBA8888 fourcc_code('R', 'A', '2', '4') /* [31:0] R:G:B:A 8:8:8:8 little endian */
#define DRM_FORMAT_RGB888 fourcc_code('R', 'G', '2', '4') /* [23:0] R:G:B little endian */
#define DRM_FORMAT_RG88 fourcc_code('R', 'G', '8', '8') /* [15:0] R:G 8:8 little endian */
#define DRM_FORMAT_ABGR8888 fourcc_code('A', 'B', '2', '4') /* [31:0] A:B:G:R 8:8:8:8 little endian */
#define DRM_FORMAT_BGR888 fourcc_code('B', 'G', '2', '4') /* [23:0] B:G:R little endian */
#define DRM_FORMAT_GR88 fourcc_code('G', 'R', '8', '8') /* [15:0] G:R 8:8 little endian */
#define DRM_FORMAT_R8 fourcc_code('R', '8', ' ', ' ') /* [7:0] R */
#define DRM_FORMAT_R16 fourcc_code('R', '1', '6', ' ') /* [15:0] R little endian */
#define DRM_FORMAT_RGB565 fourcc_code('R', 'G', '1', '6') /* [15:0] R:G:B 5:6:5 little endian */
#define DRM_FORMAT_RG1616 fourcc_code('R', 'G', '3', '2') /* [31:0] R:G 16:16 little endian */
#define DRM_FORMAT_GR1616 fourcc_code('G', 'R', '3', '2') /* [31:0] G:R 16:16 little endian */
#define DRM_FORMAT_BGRA1010102 fourcc_code('B', 'A', '3', '0') /* [31:0] B:G:R:A 10:10:10:2 little endian */

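// A frame is exposed as an RHI texture handle only when the sink provides an
// RHI instance and the GStreamer memory is not plain CPU memory; otherwise the
// buffer reports NoHandle and its data is reached through map()/unmap().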
QGstVideoBuffer::QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info,
                                 QGstreamerVideoSink *sink, const QVideoFrameFormat &frameFormat,
                                 QGstCaps::MemoryFormat format)
    : QHwVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory)
                             ? QVideoFrame::RhiTextureHandle
                             : QVideoFrame::NoHandle,
                     sink ? sink->rhi() : nullptr),
      memoryFormat(format),
      m_frameFormat(frameFormat),
      m_rhi(sink ? sink->rhi() : nullptr),
      m_videoInfo(info),
      m_buffer(std::move(buffer))
{
#if QT_CONFIG(gstreamer_gl_egl)
    if (sink) {
        eglDisplay = sink->eglDisplay();
        eglImageTargetTexture2D = sink->eglImageTargetTexture2D();
    }
#endif
    Q_UNUSED(memoryFormat);
    Q_UNUSED(eglDisplay);
    Q_UNUSED(eglImageTargetTexture2D);
}

QGstVideoBuffer::~QGstVideoBuffer()
{
    Q_ASSERT(m_mode == QVideoFrame::NotMapped);
}

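// Maps the underlying GstBuffer for CPU access. Encoded content (n_planes == 0)
// is mapped as a single opaque block with gst_buffer_map(); raw video is mapped
// with gst_video_frame_map(), which exposes per-plane data pointers, strides
// and sizes.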
QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QVideoFrame::MapMode mode)
{
    const GstMapFlags flags = GstMapFlags(((mode & QVideoFrame::ReadOnly) ? GST_MAP_READ : 0)
                                          | ((mode & QVideoFrame::WriteOnly) ? GST_MAP_WRITE : 0));

    MapData mapData;
    if (mode == QVideoFrame::NotMapped || m_mode != QVideoFrame::NotMapped)
        return mapData;

    if (m_videoInfo.finfo->n_planes == 0) { // Encoded
        if (gst_buffer_map(m_buffer.get(), &m_frame.map[0], flags)) {
            mapData.planeCount = 1;
            mapData.bytesPerLine[0] = -1;
            mapData.dataSize[0] = m_frame.map[0].size;
            mapData.data[0] = static_cast<uchar *>(m_frame.map[0].data);

            m_mode = mode;
        }
    } else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer.get(), flags)) {
        mapData.planeCount = GST_VIDEO_FRAME_N_PLANES(&m_frame);

        for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES(&m_frame); ++i) {
            mapData.bytesPerLine[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&m_frame, i);
            mapData.data[i] = static_cast<uchar *>(GST_VIDEO_FRAME_PLANE_DATA(&m_frame, i));
            mapData.dataSize[i] = mapData.bytesPerLine[i] * GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i);
        }

        m_mode = mode;
    }
    return mapData;
}

void QGstVideoBuffer::unmap()
{
    if (m_mode != QVideoFrame::NotMapped) {
        if (m_videoInfo.finfo->n_planes == 0)
            gst_buffer_unmap(m_buffer.get(), &m_frame.map[0]);
        else
            gst_video_frame_unmap(&m_frame);
    }
    m_mode = QVideoFrame::NotMapped;
}

#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
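// Returns the DRM fourcc used to import one plane of a dmabuf as an EGL image.
// Multi-planar YUV formats are imported plane by plane, so e.g. NV12 maps to an
// R8 texture for the Y plane and a two-channel (GR88/RG88) texture for the
// interleaved UV plane. On little-endian hosts the byte-swapped RGB variants
// are chosen. Returns -1 for formats that cannot be imported this way.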
static int
fourccFromVideoInfo(const GstVideoInfo *info, int plane)
{
    GstVideoFormat format = GST_VIDEO_INFO_FORMAT(info);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    const gint rgba_fourcc = DRM_FORMAT_ABGR8888;
    const gint rgb_fourcc = DRM_FORMAT_BGR888;
    const gint rg_fourcc = DRM_FORMAT_GR88;
#else
    const gint rgba_fourcc = DRM_FORMAT_RGBA8888;
    const gint rgb_fourcc = DRM_FORMAT_RGB888;
    const gint rg_fourcc = DRM_FORMAT_RG88;
#endif

    GST_DEBUG("Getting DRM fourcc for %s plane %i",
              gst_video_format_to_string(format), plane);

    switch (format) {
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
        return DRM_FORMAT_RGB565;

    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
        return rgb_fourcc;

    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_AYUV:
#if GST_CHECK_PLUGINS_BASE_VERSION(1,16,0)
    case GST_VIDEO_FORMAT_VUYA:
#endif
        return rgba_fourcc;

    case GST_VIDEO_FORMAT_GRAY8:
        return DRM_FORMAT_R8;

    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
        return rg_fourcc;

    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
        return plane == 0 ? DRM_FORMAT_R8 : rg_fourcc;

    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y444:
        return DRM_FORMAT_R8;

#if GST_CHECK_PLUGINS_BASE_VERSION(1,16,0)
    case GST_VIDEO_FORMAT_BGR10A2_LE:
        return DRM_FORMAT_BGRA1010102;
#endif

//    case GST_VIDEO_FORMAT_RGB10A2_LE:
//        return DRM_FORMAT_RGBA1010102;

    case GST_VIDEO_FORMAT_P010_10LE:
//    case GST_VIDEO_FORMAT_P012_LE:
//    case GST_VIDEO_FORMAT_P016_LE:
        return plane == 0 ? DRM_FORMAT_R16 : DRM_FORMAT_GR1616;

    case GST_VIDEO_FORMAT_P010_10BE:
//    case GST_VIDEO_FORMAT_P012_BE:
//    case GST_VIDEO_FORMAT_P016_BE:
        return plane == 0 ? DRM_FORMAT_R16 : DRM_FORMAT_RG1616;

    default:
        GST_ERROR("Unsupported format for DMABuf.");
        return -1;
    }
}
#endif

#if QT_CONFIG(gstreamer_gl)
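// Describes the native GL textures backing one video frame: plane count, the
// GL texture names, and whether this code generated the textures (and must
// therefore delete them again).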
struct GlTextures
{
    uint count = 0;
    bool owned = false;
    std::array<guint32, QVideoTextureHelper::TextureDescription::maxPlanes> names{};
};

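// Wraps a set of native GL texture names into QRhiTexture objects so the video
// frame can be sampled through the RHI. Owned textures (the dmabuf import path)
// are deleted again in the destructor once a GL context is current.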
class QGstQVideoFrameTextures : public QVideoFrameTextures
{
public:
    QGstQVideoFrameTextures(QRhi *rhi, QSize size, QVideoFrameFormat::PixelFormat format, GlTextures &textures)
        : m_rhi(rhi)
        , m_glTextures(textures)
    {
        auto desc = QVideoTextureHelper::textureDescription(format);
        for (uint i = 0; i < textures.count; ++i) {
            QSize planeSize(desc->widthForPlane(size.width(), int(i)),
                            desc->heightForPlane(size.height(), int(i)));
            m_textures[i].reset(rhi->newTexture(desc->textureFormat[i], planeSize, 1, {}));
            m_textures[i]->createFrom({textures.names[i], 0});
        }
    }

    ~QGstQVideoFrameTextures()
    {
        m_rhi->makeThreadLocalNativeContextCurrent();
        auto ctx = QOpenGLContext::currentContext();
        if (m_glTextures.owned && ctx)
            ctx->functions()->glDeleteTextures(int(m_glTextures.count), m_glTextures.names.data());
    }

    QRhiTexture *texture(uint plane) const override
    {
        return plane < m_glTextures.count ? m_textures[plane].get() : nullptr;
    }

private:
    QRhi *m_rhi = nullptr;
    GlTextures m_glTextures;
    std::unique_ptr<QRhiTexture> m_textures[QVideoTextureHelper::TextureDescription::maxPlanes];
};

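// Extracts the GL texture names from a buffer backed by GstGLMemory. The frame
// is mapped with GST_MAP_GL, a GL sync point is set and waited on so the
// producer's GL work has finished, and the per-plane texture names are read
// from the mapped plane data. The textures stay owned by GStreamer.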
static GlTextures mapFromGlTexture(const QGstBufferHandle &bufferHandle, GstVideoFrame &frame,
                                   GstVideoInfo &videoInfo)
{
    GstBuffer *buffer = bufferHandle.get();
    auto *mem = GST_GL_BASE_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
    if (!mem)
        return {};

    if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ | GST_MAP_GL))) {
        qWarning() << "Could not map GL textures";
        return {};
    }

    auto *sync_meta = gst_buffer_get_gl_sync_meta(buffer);
    GstBuffer *sync_buffer = nullptr;
    if (!sync_meta) {
        sync_buffer = gst_buffer_new();
        sync_meta = gst_buffer_add_gl_sync_meta(mem->context, sync_buffer);
    }
    gst_gl_sync_meta_set_sync_point(sync_meta, mem->context);
    gst_gl_sync_meta_wait(sync_meta, mem->context);
    if (sync_buffer)
        gst_buffer_unref(sync_buffer);

    GlTextures textures;
    textures.count = frame.info.finfo->n_planes;

    for (uint i = 0; i < textures.count; ++i)
        textures.names[i] = *(guint32 *)frame.data[i];

    gst_video_frame_unmap(&frame);

    return textures;
}

#  if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
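// Imports a dmabuf-backed buffer into freshly generated GL textures. For each
// plane, the dmabuf fd, offset, stride and DRM fourcc are handed to
// eglCreateImage() with EGL_LINUX_DMA_BUF_EXT, the image is bound to a new GL
// texture via glEGLImageTargetTexture2DOES, and the EGLImage is destroyed
// again. The resulting textures are marked as owned so they are deleted along
// with the frame.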
static GlTextures mapFromDmaBuffer(QRhi *rhi, const QGstBufferHandle &bufferHandle,
                                   GstVideoFrame &frame, GstVideoInfo &videoInfo,
                                   Qt::HANDLE eglDisplay, QFunctionPointer eglImageTargetTexture2D)
{
    GstBuffer *buffer = bufferHandle.get();

    Q_ASSERT(gst_is_dmabuf_memory(gst_buffer_peek_memory(buffer, 0)));
    Q_ASSERT(eglDisplay);
    Q_ASSERT(eglImageTargetTexture2D);

    auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
    auto glContext = nativeHandles->context;
    if (!glContext) {
        qWarning() << "no GL context";
        return {};
    }

    if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ))) {
        qDebug() << "Couldn't map DMA video frame";
        return {};
    }

    GlTextures textures = {};
    textures.owned = true;
    textures.count = GST_VIDEO_FRAME_N_PLANES(&frame);
    // int width = GST_VIDEO_FRAME_WIDTH(&frame);
    // int height = GST_VIDEO_FRAME_HEIGHT(&frame);
    Q_ASSERT(GST_VIDEO_FRAME_N_PLANES(&frame) == gst_buffer_n_memory(buffer));

    QOpenGLFunctions functions(glContext);
    functions.glGenTextures(int(textures.count), textures.names.data());

    // qDebug() << Qt::hex << "glGenTextures: glerror" << glGetError() << "egl error" << eglGetError();
    // qDebug() << "converting DMA buffer nPlanes=" << nPlanes << m_textures[0] << m_textures[1] << m_textures[2];

    for (int i = 0; i < int(textures.count); ++i) {
        auto offset = GST_VIDEO_FRAME_PLANE_OFFSET(&frame, i);
        auto stride = GST_VIDEO_FRAME_PLANE_STRIDE(&frame, i);
        int planeWidth = GST_VIDEO_FRAME_COMP_WIDTH(&frame, i);
        int planeHeight = GST_VIDEO_FRAME_COMP_HEIGHT(&frame, i);
        auto mem = gst_buffer_peek_memory(buffer, i);
        int fd = gst_dmabuf_memory_get_fd(mem);

        // qDebug() << " plane" << i << "size" << width << height << "stride" << stride << "offset" << offset << "fd=" << fd;
        // ### do we need to open/close the fd?
        // ### can we convert several planes at once?
        // Get the correct DRM_FORMATs from the texture format in the description
        EGLAttrib const attribute_list[] = {
            EGL_WIDTH, planeWidth,
            EGL_HEIGHT, planeHeight,
            EGL_LINUX_DRM_FOURCC_EXT, fourccFromVideoInfo(&videoInfo, i),
            EGL_DMA_BUF_PLANE0_FD_EXT, fd,
            EGL_DMA_BUF_PLANE0_OFFSET_EXT, (EGLAttrib)offset,
            EGL_DMA_BUF_PLANE0_PITCH_EXT, stride,
            EGL_NONE
        };
        EGLImage image = eglCreateImage(eglDisplay,
                                        EGL_NO_CONTEXT,
                                        EGL_LINUX_DMA_BUF_EXT,
                                        nullptr,
                                        attribute_list);
        if (image == EGL_NO_IMAGE_KHR) {
            qWarning() << "could not create EGL image for plane" << i << Qt::hex << eglGetError();
        }
        // qDebug() << Qt::hex << "eglCreateImage: glerror" << glGetError() << "egl error" << eglGetError();
        functions.glBindTexture(GL_TEXTURE_2D, textures.names[i]);
        // qDebug() << Qt::hex << "bind texture: glerror" << glGetError() << "egl error" << eglGetError();
        auto EGLImageTargetTexture2D = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglImageTargetTexture2D;
        EGLImageTargetTexture2D(GL_TEXTURE_2D, image);
        // qDebug() << Qt::hex << "glerror" << glGetError() << "egl error" << eglGetError();
        eglDestroyImage(eglDisplay, image);
    }
    gst_video_frame_unmap(&frame);

    return textures;
}
#  endif
#endif

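// Dispatches to the zero-copy path that matches the negotiated GStreamer memory
// type: GL memory hands over the existing texture names, dmabuf memory is
// imported through EGL. Anything else (or a missing RHI) yields no textures.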
std::unique_ptr<QVideoFrameTextures> QGstVideoBuffer::mapTextures(QRhi *rhi)
{
    if (!rhi)
        return {};

#if QT_CONFIG(gstreamer_gl)
    GlTextures textures = {};
    if (memoryFormat == QGstCaps::GLTexture)
        textures = mapFromGlTexture(m_buffer, m_frame, m_videoInfo);

#  if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
    else if (memoryFormat == QGstCaps::DMABuf)
        textures = mapFromDmaBuffer(m_rhi, m_buffer, m_frame, m_videoInfo, eglDisplay,
                                    eglImageTargetTexture2D);

#  endif
    if (textures.count > 0)
        return std::make_unique<QGstQVideoFrameTextures>(rhi, QSize{m_videoInfo.width, m_videoInfo.height},
                                                         m_frameFormat.pixelFormat(), textures);
#endif
    return {};
}

QT_END_NAMESPACE
