// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qgstreamerimagecapture_p.h"
#include <private/qplatformcamera_p.h>
#include <private/qplatformimagecapture_p.h>
#include <qgstvideobuffer_p.h>
#include <qgstutils_p.h>
#include <qgstreamermetadata_p.h>
#include <qvideoframeformat.h>
#include <private/qmediastoragelocation_p.h>

#include <QtCore/QDebug>
#include <QtCore/QDir>
#include <utility>
#include <qstandardpaths.h>

#include <qloggingcategory.h>

QT_BEGIN_NAMESPACE

static Q_LOGGING_CATEGORY(qLcImageCaptureGst, "qt.multimedia.imageCapture")

QMaybe<QPlatformImageCapture *> QGstreamerImageCapture::create(QImageCapture *parent)
{
    QGstElement videoconvert("videoconvert", "imageCaptureConvert");
    if (!videoconvert)
        return errorMessageCannotFindElement("videoconvert");

    QGstElement jpegenc("jpegenc", "jpegEncoder");
    if (!jpegenc)
        return errorMessageCannotFindElement("jpegenc");

    QGstElement jifmux("jifmux", "jpegMuxer");
    if (!jifmux)
        return errorMessageCannotFindElement("jifmux");

    return new QGstreamerImageCapture(videoconvert, jpegenc, jifmux, parent);
}

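// The image capture bin assembles the chain
//     queue ! capsfilter ! videoconvert ! jpegenc ! jifmux ! fakesink
// A buffer probe on the queue's src pad only lets a frame through while a capture
// is pending (passImage), so the encoder sees exactly one frame per request.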
QGstreamerImageCapture::QGstreamerImageCapture(QGstElement videoconvert, QGstElement jpegenc,
                                               QGstElement jifmux, QImageCapture *parent)
    : QPlatformImageCapture(parent),
      QGstreamerBufferProbe(ProbeBuffers),
      videoConvert(std::move(videoconvert)),
      encoder(std::move(jpegenc)),
      muxer(std::move(jifmux))
{
    bin = QGstBin("imageCaptureBin");

    queue = QGstElement("queue", "imageCaptureQueue");
    // configure the queue to be fast, lightweight and non-blocking
    queue.set("leaky", 2 /*downstream*/);
    queue.set("silent", true);
    queue.set("max-size-buffers", uint(1));
    queue.set("max-size-bytes", uint(0));
    queue.set("max-size-time", quint64(0));

    sink = QGstElement("fakesink", "imageCaptureSink");
    filter = QGstElement("capsfilter", "filter");
    // imageCaptureSink does not wait for a preroll buffer when going READY -> PAUSED,
    // as no buffer will arrive until capture() is called
    sink.set("async", false);

    bin.add(queue, filter, videoConvert, encoder, muxer, sink);
    queue.link(filter, videoConvert, encoder, muxer, sink);
    bin.addGhostPad(queue, "sink");

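    // Install a buffer probe (see probeBuffer()) on the queue's src pad; it emits the
    // preview image and metadata for a pending capture before the frame is encoded.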
    addProbeToPad(queue.staticPad("src").pad(), false);

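    // fakesink's "handoff" signal fires for every buffer that reaches the sink;
    // saveImageFilter() writes the encoded JPEG data to the requested file.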
    sink.set("signal-handoffs", true);
    g_signal_connect(sink.object(), "handoff", G_CALLBACK(&QGstreamerImageCapture::saveImageFilter), this);
}

QGstreamerImageCapture::~QGstreamerImageCapture()
{
    bin.setStateSync(GST_STATE_NULL);
}

bool QGstreamerImageCapture::isReadyForCapture() const
{
    return m_session && !passImage && cameraActive;
}

int QGstreamerImageCapture::capture(const QString &fileName)
{
    QString path = QMediaStorageLocation::generateFileName(fileName, QStandardPaths::PicturesLocation, QLatin1String("jpg"));
    return doCapture(path);
}

int QGstreamerImageCapture::captureToBuffer()
{
    return doCapture(QString());
}

int QGstreamerImageCapture::doCapture(const QString &fileName)
{
    qCDebug(qLcImageCaptureGst) << "do capture";
    if (!m_session) {
        // emit the error in the next event loop iteration,
        // so the application can associate it with the returned request id
        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
                                  Q_ARG(int, -1),
                                  Q_ARG(int, QImageCapture::ResourceError),
                                  Q_ARG(QString, QPlatformImageCapture::msgImageCaptureNotSet()));

        qCDebug(qLcImageCaptureGst) << "error 1";
        return -1;
    }
    if (!m_session->camera()) {
        // emit the error in the next event loop iteration,
        // so the application can associate it with the returned request id
        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
                                  Q_ARG(int, -1),
                                  Q_ARG(int, QImageCapture::ResourceError),
                                  Q_ARG(QString, tr("No camera available.")));

        qCDebug(qLcImageCaptureGst) << "error 2";
        return -1;
    }
    if (passImage) {
        // emit the error in the next event loop iteration,
        // so the application can associate it with the returned request id
        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
                                  Q_ARG(int, -1),
                                  Q_ARG(int, QImageCapture::NotReadyError),
                                  Q_ARG(QString, QPlatformImageCapture::msgCameraNotReady()));

        qCDebug(qLcImageCaptureGst) << "error 3";
        return -1;
    }
    m_lastId++;

    pendingImages.enqueue({m_lastId, fileName, QMediaMetaData{}});
    // let one image pass through the pipeline
    passImage = true;

    emit readyForCaptureChanged(false);
    return m_lastId;
}

void QGstreamerImageCapture::setResolution(const QSize &resolution)
{
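    // Start from the caps currently negotiated on the bin's sink pad and only
    // override width and height for the capsfilter in front of the converter.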
    auto padCaps = QGstCaps(gst_pad_get_current_caps(bin.staticPad("sink").pad()), QGstCaps::HasRef);
    if (padCaps.isNull()) {
        qDebug() << "Camera not ready";
        return;
    }
    auto caps = QGstCaps(gst_caps_copy(padCaps.get()), QGstCaps::HasRef);
    if (caps.isNull()) {
        return;
    }
    gst_caps_set_simple(caps.get(),
                        "width", G_TYPE_INT, resolution.width(),
                        "height", G_TYPE_INT, resolution.height(),
                        nullptr);
    filter.set("caps", caps);
}

bool QGstreamerImageCapture::probeBuffer(GstBuffer *buffer)
{
    if (!passImage)
        return false;
    qCDebug(qLcImageCaptureGst) << "probe buffer";

    passImage = false;

    emit readyForCaptureChanged(isReadyForCapture());

    auto caps = QGstCaps(gst_pad_get_current_caps(bin.staticPad("sink").pad()), QGstCaps::HasRef);
    GstVideoInfo previewInfo;
    gst_video_info_from_caps(&previewInfo, caps.get());

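    // Wrap the GstBuffer in a QVideoFrame so it can be passed to imageAvailable()
    // and converted to a QImage for the imageCaptured() preview.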
    auto memoryFormat = caps.memoryFormat();
    auto fmt = caps.formatForCaps(&previewInfo);
    auto *sink = m_session->gstreamerVideoSink();
    auto *gstBuffer = new QGstVideoBuffer(buffer, previewInfo, sink, fmt, memoryFormat);
    QVideoFrame frame(gstBuffer, fmt);
    QImage img = frame.toImage();
    if (img.isNull()) {
        qDebug() << "received a null image";
        return true;
    }

    auto &imageData = pendingImages.head();

    emit imageExposed(imageData.id);

    qCDebug(qLcImageCaptureGst) << "Image available!";
    emit imageAvailable(imageData.id, frame);

    emit imageCaptured(imageData.id, img);

    QMediaMetaData metaData = this->metaData();
    metaData.insert(QMediaMetaData::Date, QDateTime::currentDateTime());
    metaData.insert(QMediaMetaData::Resolution, frame.size());
    imageData.metaData = metaData;

    // ensure the muxer writes this metaData into the image file
    const auto &md = static_cast<const QGstreamerMetaData &>(metaData);
    md.setMetaData(muxer.element());

    emit imageMetadataAvailable(imageData.id, metaData);

    return true;
}

void QGstreamerImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
{
    QGstreamerMediaCapture *captureSession = static_cast<QGstreamerMediaCapture *>(session);
    if (m_session == captureSession)
        return;

    bool readyForCapture = isReadyForCapture();
    if (m_session) {
        disconnect(m_session, nullptr, this, nullptr);
        m_lastId = 0;
        pendingImages.clear();
        passImage = false;
        cameraActive = false;
    }

    m_session = captureSession;
    if (!m_session) {
        if (readyForCapture)
            emit readyForCaptureChanged(false);
        return;
    }

    connect(m_session, &QPlatformMediaCaptureSession::cameraChanged, this, &QGstreamerImageCapture::onCameraChanged);
    onCameraChanged();
}

void QGstreamerImageCapture::cameraActiveChanged(bool active)
{
    qCDebug(qLcImageCaptureGst) << "cameraActiveChanged" << cameraActive << active;
    if (cameraActive == active)
        return;
    cameraActive = active;
    qCDebug(qLcImageCaptureGst) << "isReady" << isReadyForCapture();
    emit readyForCaptureChanged(isReadyForCapture());
}

void QGstreamerImageCapture::onCameraChanged()
{
    if (m_session->camera()) {
        cameraActiveChanged(m_session->camera()->isActive());
        connect(m_session->camera(), &QPlatformCamera::activeChanged, this, &QGstreamerImageCapture::cameraActiveChanged);
    } else {
        cameraActiveChanged(false);
    }
}

gboolean QGstreamerImageCapture::saveImageFilter(GstElement *element,
                                                 GstBuffer *buffer,
                                                 GstPad *pad,
                                                 void *appdata)
{
    Q_UNUSED(element);
    Q_UNUSED(pad);
    QGstreamerImageCapture *capture = static_cast<QGstreamerImageCapture *>(appdata);

    capture->passImage = false;

    if (capture->pendingImages.isEmpty()) {
        return true;
    }

    auto imageData = capture->pendingImages.dequeue();
    if (imageData.filename.isEmpty()) {
        return true;
    }

    qCDebug(qLcImageCaptureGst) << "saving image as" << imageData.filename;

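    // Map the encoded JPEG buffer and write its contents to the requested file.
    // imageSaved is emitted through a queued invocation so it is delivered on the
    // capture object's thread rather than synchronously from this GStreamer callback.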
    QFile f(imageData.filename);
    if (f.open(QFile::WriteOnly)) {
        GstMapInfo info;
        if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
            f.write(reinterpret_cast<const char *>(info.data), info.size);
            gst_buffer_unmap(buffer, &info);
        }
        f.close();

        static QMetaMethod savedSignal = QMetaMethod::fromSignal(&QGstreamerImageCapture::imageSaved);
        savedSignal.invoke(capture,
                           Qt::QueuedConnection,
                           Q_ARG(int, imageData.id),
                           Q_ARG(QString, imageData.filename));
    } else {
        qCDebug(qLcImageCaptureGst) << "  could not open image file for writing";
    }

    return TRUE;
}

QImageEncoderSettings QGstreamerImageCapture::imageSettings() const
{
    return m_settings;
}

void QGstreamerImageCapture::setImageSettings(const QImageEncoderSettings &settings)
{
    if (m_settings != settings) {
        QSize resolution = settings.resolution();
        if (m_settings.resolution() != resolution && !resolution.isEmpty()) {
            setResolution(resolution);
        }
        m_settings = settings;
    }
}

QT_END_NAMESPACE

#include "moc_qgstreamerimagecapture_p.cpp"