1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <mediacapture/qgstreamermediacapturesession_p.h>
5#include <mediacapture/qgstreamermediarecorder_p.h>
6#include <mediacapture/qgstreamerimagecapture_p.h>
7#include <mediacapture/qgstreamercamera_p.h>
8#include <common/qgstpipeline_p.h>
9#include <common/qgstreameraudioinput_p.h>
10#include <common/qgstreameraudiooutput_p.h>
11#include <common/qgstreamervideooutput_p.h>
12#include <common/qgst_debug_p.h>
13
14#include <QtCore/qloggingcategory.h>
15#include <QtCore/private/quniquehandle_p.h>
16
17QT_BEGIN_NAMESPACE
18
19namespace {
20
21QGstElement makeTee(const char *name)
22{
23 QGstElement tee = QGstElement::createFromFactory(factory: "tee", name);
24 tee.set(property: "allow-not-linked", b: true);
25 return tee;
26}
27
28template <typename Functor>
29void executeWhilePadsAreIdle(QSpan<QGstPad> pads, Functor &&f)
30{
31 if (pads.isEmpty())
32 f();
33
34 if (!pads.front())
35 return executeWhilePadsAreIdle(pads.subspan(pos: 1), f);
36
37 if (pads.size() == 1)
38 pads.front().modifyPipelineInIdleProbe(f);
39 else {
40 auto remain = pads.subspan(pos: 1);
41 pads.front().modifyPipelineInIdleProbe([&] {
42 executeWhilePadsAreIdle(remain, f);
43 });
44 }
45}
46
47void setStateOnElements(QSpan<const QGstElement> elements, GstState state)
48{
49 for (QGstElement element : elements)
50 if (element)
51 element.setState(state);
52}
53
54void finishStateChangeOnElements(QSpan<const QGstElement> elements)
55{
56 for (QGstElement element : elements)
57 if (element)
58 element.finishStateChange();
59}
60
61} // namespace
62
63QMaybe<QPlatformMediaCaptureSession *> QGstreamerMediaCaptureSession::create()
64{
65 auto videoOutput = QGstreamerVideoOutput::create();
66 if (!videoOutput)
67 return videoOutput.error();
68
69 static const auto error = qGstErrorMessageIfElementsNotAvailable(arg: "tee", args: "capsfilter");
70 if (error)
71 return *error;
72
73 return new QGstreamerMediaCaptureSession(videoOutput.value());
74}
75
QGstreamerMediaCaptureSession::QGstreamerMediaCaptureSession(QGstreamerVideoOutput *videoOutput)
    : capturePipeline{
          QGstPipeline::create("mediaCapturePipeline"),
      },
      gstAudioTee{
          makeTee("audioTee"),
      },
      // Request pads on the tees are acquired once here and kept for the whole
      // session lifetime; the encoder / output / image-capture branches always
      // attach to these fixed pads.
      audioSrcPadForEncoder{ gstAudioTee.getRequestPad("src_%u") },
      audioSrcPadForOutput{ gstAudioTee.getRequestPad("src_%u") },
      gstVideoTee{
          makeTee("videoTee"),
      },
      videoSrcPadForEncoder{ gstVideoTee.getRequestPad("src_%u") },
      videoSrcPadForOutput{ gstVideoTee.getRequestPad("src_%u") },
      videoSrcPadForImageCapture{ gstVideoTee.getRequestPad("src_%u") },
      gstVideoOutput(videoOutput)
{
    gstVideoOutput->setParent(this);
    gstVideoOutput->setIsPreview();

    // Observe bus messages (errors, latency, recorder events); enable
    // "message-forward" so messages from child bins reach this filter too.
    capturePipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    capturePipeline.set("message-forward", true);

    // Use system clock to drive all elements in the pipeline. Otherwise,
    // the clock is sourced from the elements (e.g. from an audio source).
    // Since the elements are added and removed dynamically the clock would
    // also change causing lost of synchronization in the pipeline.

    QGstClockHandle systemClock{
        gst_system_clock_obtain(),
    };
    gst_pipeline_use_clock(capturePipeline.pipeline(), systemClock.get());

    // This is the recording pipeline with only live sources, thus the pipeline
    // will be always in the playing state.
    capturePipeline.setState(GST_STATE_PLAYING);
    gstVideoOutput->setActive(true);

    capturePipeline.dumpGraph("initial");
}
116
117QGstPad QGstreamerMediaCaptureSession::imageCaptureSink()
118{
119 return m_imageCapture ? m_imageCapture->gstElement().staticPad(name: "sink") : QGstPad{};
120}
121
122QGstPad QGstreamerMediaCaptureSession::videoOutputSink()
123{
124 return gstVideoOutput ? gstVideoOutput->gstElement().staticPad(name: "sink") : QGstPad{};
125}
126
127QGstPad QGstreamerMediaCaptureSession::audioOutputSink()
128{
129 return gstAudioOutput ? gstAudioOutput->gstElement().staticPad(name: "sink") : QGstPad{};
130}
131
QGstreamerMediaCaptureSession::~QGstreamerMediaCaptureSession()
{
    // Detach recorder, image capture and camera first so their branch-specific
    // unlink/remove paths run while the pipeline is still alive.
    setMediaRecorder(nullptr);
    setImageCapture(nullptr);
    setCamera(nullptr);
    capturePipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    // Step the pipeline down via READY before NULL rather than jumping
    // straight to NULL.
    capturePipeline.setStateSync(GST_STATE_READY);
    capturePipeline.setStateSync(GST_STATE_NULL);
}
141
// Returns the currently attached camera control, or nullptr if none.
QPlatformCamera *QGstreamerMediaCaptureSession::camera()
{
    return gstCamera;
}
146
147void QGstreamerMediaCaptureSession::setCamera(QPlatformCamera *platformCamera)
148{
149 auto *camera = static_cast<QGstreamerCameraBase *>(platformCamera);
150 if (gstCamera == camera)
151 return;
152
153 if (gstCamera) {
154 QObject::disconnect(gstCameraActiveConnection);
155 if (gstVideoTee)
156 setCameraActive(false);
157 }
158
159 gstCamera = camera;
160
161 if (gstCamera) {
162 gstCameraActiveConnection =
163 QObject::connect(sender: camera, signal: &QPlatformCamera::activeChanged, context: this,
164 slot: &QGstreamerMediaCaptureSession::setCameraActive);
165 if (gstCamera->isActive())
166 setCameraActive(true);
167 }
168
169 emit cameraChanged();
170}
171
172void QGstreamerMediaCaptureSession::setCameraActive(bool activate)
173{
174 std::array padsToSync = {
175 videoSrcPadForEncoder,
176 videoSrcPadForImageCapture,
177 videoSrcPadForOutput,
178 gstVideoTee.sink(),
179 };
180
181 QGstElement cameraElement = gstCamera->gstElement();
182 QGstElement videoOutputElement = gstVideoOutput->gstElement();
183
184 if (activate) {
185 gstCamera->setCaptureSession(this);
186 capturePipeline.add(ts: gstVideoTee);
187
188 executeWhilePadsAreIdle(pads: padsToSync, f: [&] {
189 capturePipeline.add(ts: cameraElement);
190 if (videoOutputElement)
191 capturePipeline.add(ts: videoOutputElement);
192
193 if (m_currentRecorderState && m_currentRecorderState->videoSink)
194 videoSrcPadForEncoder.link(sink: m_currentRecorderState->videoSink);
195 if (videoOutputElement)
196 videoSrcPadForOutput.link(sink: videoOutputSink());
197 if (m_imageCapture)
198 videoSrcPadForImageCapture.link(sink: imageCaptureSink());
199
200 qLinkGstElements(ts: cameraElement, ts: gstVideoTee);
201
202 setStateOnElements(elements: { gstVideoTee, cameraElement, videoOutputElement },
203 state: GST_STATE_PLAYING);
204 });
205
206 finishStateChangeOnElements(elements: { gstVideoTee, cameraElement, videoOutputElement });
207
208 for (QGstElement addedElement : { gstVideoTee, cameraElement, videoOutputElement })
209 addedElement.finishStateChange();
210
211 } else {
212 executeWhilePadsAreIdle(pads: padsToSync, f: [&] {
213 for (QGstPad &pad : padsToSync)
214 pad.unlinkPeer();
215 });
216 capturePipeline.stopAndRemoveElements(ts&: cameraElement, ts&: gstVideoTee, ts&: videoOutputElement);
217
218 gstCamera->setCaptureSession(nullptr);
219 }
220
221 capturePipeline.dumpGraph(fileNamePrefix: "camera");
222}
223
// Returns the currently attached image-capture control, or nullptr if none.
QPlatformImageCapture *QGstreamerMediaCaptureSession::imageCapture()
{
    return m_imageCapture;
}
228
229void QGstreamerMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
230{
231 QGstreamerImageCapture *control = static_cast<QGstreamerImageCapture *>(imageCapture);
232 if (m_imageCapture == control)
233 return;
234
235 videoSrcPadForEncoder.modifyPipelineInIdleProbe(f: [&] {
236 if (m_imageCapture) {
237 qUnlinkGstElements(ts: gstVideoTee, ts: m_imageCapture->gstElement());
238 capturePipeline.stopAndRemoveElements(ts: m_imageCapture->gstElement());
239 m_imageCapture->setCaptureSession(nullptr);
240 }
241
242 m_imageCapture = control;
243
244 if (m_imageCapture) {
245 capturePipeline.add(ts: m_imageCapture->gstElement());
246 videoSrcPadForImageCapture.link(sink: imageCaptureSink());
247 m_imageCapture->setCaptureSession(this);
248 m_imageCapture->gstElement().setState(GST_STATE_PLAYING);
249 }
250 });
251 if (m_imageCapture)
252 m_imageCapture->gstElement().finishStateChange();
253
254 capturePipeline.dumpGraph(fileNamePrefix: "imageCapture");
255
256 emit imageCaptureChanged();
257}
258
259void QGstreamerMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder)
260{
261 QGstreamerMediaRecorder *control = static_cast<QGstreamerMediaRecorder *>(recorder);
262 if (m_mediaRecorder == control)
263 return;
264
265 if (m_mediaRecorder)
266 m_mediaRecorder->setCaptureSession(nullptr);
267 m_mediaRecorder = control;
268 if (m_mediaRecorder)
269 m_mediaRecorder->setCaptureSession(this);
270
271 emit encoderChanged();
272 capturePipeline.dumpGraph(fileNamePrefix: "encoder");
273}
274
// Returns the currently attached media recorder, or nullptr if none.
QPlatformMediaRecorder *QGstreamerMediaCaptureSession::mediaRecorder()
{
    return m_mediaRecorder;
}
279
// Build and start the recording branch: encodebin -> filesink, with
// capsfilters in front of the encoder that pin the caps currently negotiated
// on the tees. All linking happens while the encoder-facing tee pads are
// blocked by idle probes. Precondition: no recording is in progress.
void QGstreamerMediaCaptureSession::linkAndStartEncoder(RecorderElements recorder,
                                                        const QMediaMetaData &metadata)
{
    Q_ASSERT(!m_currentRecorderState);

    std::array padsToSync = {
        audioSrcPadForEncoder,
        videoSrcPadForEncoder,
    };

    executeWhilePadsAreIdle(padsToSync, [&] {
        capturePipeline.add(recorder.encodeBin, recorder.fileSink);
        qLinkGstElements(recorder.encodeBin, recorder.fileSink);

        applyMetaDataToTagSetter(metadata, recorder.encodeBin);

        if (recorder.videoSink) {
            // Freeze the caps currently flowing into the video tee so the
            // encoder sees a stable format for the whole recording.
            QGstCaps capsFromCamera = gstVideoTee.sink().currentCaps();

            encoderVideoCapsFilter =
                    QGstElement::createFromFactory("capsfilter", "encoderVideoCapsFilter");
            encoderVideoCapsFilter.set("caps", capsFromCamera);

            capturePipeline.add(encoderVideoCapsFilter);
            encoderVideoCapsFilter.src().link(recorder.videoSink);
            videoSrcPadForEncoder.link(encoderVideoCapsFilter.sink());
        }

        if (recorder.audioSink) {
            // Same for audio: pin the audio tee's current caps.
            QGstCaps capsFromInput = gstAudioTee.sink().currentCaps();

            encoderAudioCapsFilter =
                    QGstElement::createFromFactory("capsfilter", "encoderAudioCapsFilter");

            encoderAudioCapsFilter.set("caps", capsFromInput);

            capturePipeline.add(encoderAudioCapsFilter);

            encoderAudioCapsFilter.src().link(recorder.audioSink);
            audioSrcPadForEncoder.link(encoderAudioCapsFilter.sink());
        }
        setStateOnElements({ recorder.encodeBin, recorder.fileSink, encoderVideoCapsFilter,
                             encoderAudioCapsFilter },
                           GST_STATE_PLAYING);

        // Send a reconfigure event to the sink after (re)linking.
        GstEvent *event = gst_event_new_reconfigure();
        gst_element_send_event(recorder.fileSink.element(), event);
    });

    // Completing async state changes must happen outside of the idle probes.
    finishStateChangeOnElements({ recorder.encodeBin, recorder.fileSink, encoderVideoCapsFilter,
                                  encoderAudioCapsFilter });

    m_currentRecorderState = std::move(recorder);
}
334
// Detach the recording branch from the tees and push EOS into the encodebin
// so it drains cleanly. finalizeRecorder() completes the teardown once the
// EOS has propagated through the sink.
void QGstreamerMediaCaptureSession::unlinkRecorder()
{
    std::array padsToSync = {
        audioSrcPadForEncoder,
        videoSrcPadForEncoder,
    };

    // Unlink the capsfilters from the tees while the encoder pads are idle.
    executeWhilePadsAreIdle(padsToSync, [&] {
        if (encoderVideoCapsFilter)
            qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);

        if (encoderAudioCapsFilter)
            qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
    });

    // The capsfilters are no longer fed; stop and drop them.
    if (encoderVideoCapsFilter) {
        capturePipeline.stopAndRemoveElements(encoderVideoCapsFilter);
        encoderVideoCapsFilter = {};
    }

    if (encoderAudioCapsFilter) {
        capturePipeline.stopAndRemoveElements(encoderAudioCapsFilter);
        encoderAudioCapsFilter = {};
    }

    // Drain the encoder; the encodebin/fileSink stay in the pipeline until
    // finalizeRecorder() removes them.
    m_currentRecorderState->encodeBin.sendEos();
}
362
363void QGstreamerMediaCaptureSession::finalizeRecorder()
364{
365 capturePipeline.stopAndRemoveElements(ts&: m_currentRecorderState->encodeBin,
366 ts&: m_currentRecorderState->fileSink);
367
368 m_currentRecorderState = std::nullopt;
369}
370
// Read-only access to the session's capture pipeline.
const QGstPipeline &QGstreamerMediaCaptureSession::pipeline() const
{
    return capturePipeline;
}
375
376void QGstreamerMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
377{
378 if (gstAudioInput == input)
379 return;
380
381 if (input && !gstAudioInput) {
382 // a new input is connected, we need to add/link the audio tee and audio tee
383
384 capturePipeline.add(ts: gstAudioTee);
385
386 std::array padsToSync = {
387 audioSrcPadForEncoder,
388 audioSrcPadForOutput,
389 gstAudioTee.sink(),
390 };
391
392 executeWhilePadsAreIdle(pads: padsToSync, f: [&] {
393 if (m_currentRecorderState && m_currentRecorderState->audioSink)
394 audioSrcPadForEncoder.link(sink: m_currentRecorderState->audioSink);
395 if (gstAudioOutput) {
396 capturePipeline.add(ts: gstAudioOutput->gstElement());
397 audioSrcPadForOutput.link(sink: audioOutputSink());
398 }
399
400 gstAudioInput = static_cast<QGstreamerAudioInput *>(input);
401 capturePipeline.add(ts: gstAudioInput->gstElement());
402
403 qLinkGstElements(ts: gstAudioInput->gstElement(), ts: gstAudioTee);
404
405 gstAudioTee.setState(GST_STATE_PLAYING);
406 if (gstAudioOutput)
407 gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
408 gstAudioInput->gstElement().setState(GST_STATE_PLAYING);
409 });
410
411 } else if (!input && gstAudioInput) {
412 // input has been removed, unlink and remove audio output and audio tee
413
414 std::array padsToSync = {
415 audioSrcPadForEncoder,
416 audioSrcPadForOutput,
417 gstAudioTee.sink(),
418 };
419
420 executeWhilePadsAreIdle(pads: padsToSync, f: [&] {
421 for (QGstPad &pad : padsToSync)
422 pad.unlinkPeer();
423 });
424
425 capturePipeline.stopAndRemoveElements(ts&: gstAudioTee);
426 if (gstAudioOutput)
427 capturePipeline.stopAndRemoveElements(ts: gstAudioOutput->gstElement());
428 capturePipeline.stopAndRemoveElements(ts: gstAudioInput->gstElement());
429
430 gstAudioInput = nullptr;
431 } else {
432 QGstElement oldInputElement = gstAudioInput->gstElement();
433
434 gstAudioTee.sink().modifyPipelineInIdleProbe(f: [&] {
435 oldInputElement.sink().unlinkPeer();
436 gstAudioInput = static_cast<QGstreamerAudioInput *>(input);
437 capturePipeline.add(ts: gstAudioInput->gstElement());
438
439 qLinkGstElements(ts: gstAudioInput->gstElement(), ts: gstAudioTee);
440
441 gstAudioInput->gstElement().setState(GST_STATE_PLAYING);
442 });
443
444 gstAudioInput->gstElement().finishStateChange();
445
446 capturePipeline.stopAndRemoveElements(ts: gstAudioInput->gstElement());
447 }
448}
449
450void QGstreamerMediaCaptureSession::setVideoPreview(QVideoSink *sink)
451{
452 auto *gstSink = sink ? static_cast<QGstreamerVideoSink *>(sink->platformVideoSink()) : nullptr;
453 if (gstSink)
454 gstSink->setAsync(false);
455
456 gstVideoOutput->setVideoSink(sink);
457 capturePipeline.dumpGraph(fileNamePrefix: "setVideoPreview");
458}
459
// Set or replace the audio monitoring output. While no audio input exists,
// only the pointer is stored — the output branch is built lazily when an
// input appears (see setAudioInput()).
void QGstreamerMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
{
    if (gstAudioOutput == output)
        return;

    auto *gstOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstOutput)
        gstOutput->setAsync(false); // disable async state changes on the output

    if (!gstAudioInput) {
        // audio output is not active, since there is no audio input
        gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
    } else {
        QGstElement oldOutputElement =
                gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
        gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);

        // Swap the output branch while the tee's output pad is idle.
        audioSrcPadForOutput.modifyPipelineInIdleProbe([&] {
            if (oldOutputElement)
                oldOutputElement.sink().unlinkPeer();

            if (gstAudioOutput) {
                capturePipeline.add(gstAudioOutput->gstElement());
                audioSrcPadForOutput.link(gstAudioOutput->gstElement().staticPad("sink"));
                gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
            }
        });

        // Completing the async state change must happen outside the probe.
        if (gstAudioOutput)
            gstAudioOutput->gstElement().finishStateChange();

        if (oldOutputElement)
            capturePipeline.stopAndRemoveElements(oldOutputElement);
    }
}
495
496QGstreamerVideoSink *QGstreamerMediaCaptureSession::gstreamerVideoSink() const
497{
498 return gstVideoOutput ? gstVideoOutput->gstreamerVideoSink() : nullptr;
499}
500
501bool QGstreamerMediaCaptureSession::processBusMessage(const QGstreamerMessage &msg)
502{
503 if (m_mediaRecorder)
504 m_mediaRecorder->processBusMessage(message: msg);
505
506 switch (msg.type()) {
507 case GST_MESSAGE_ERROR:
508 return processBusMessageError(msg);
509
510 case GST_MESSAGE_LATENCY:
511 return processBusMessageLatency(msg);
512
513 default:
514 break;
515 }
516
517 return false;
518}
519
520bool QGstreamerMediaCaptureSession::processBusMessageError(const QGstreamerMessage &msg)
521{
522 QUniqueGErrorHandle error;
523 QUniqueGStringHandle message;
524 gst_message_parse_error(message: msg.message(), gerror: &error, debug: &message);
525
526 qWarning() << "QGstreamerMediaCapture: received error from gstreamer" << error << message;
527 capturePipeline.dumpGraph(fileNamePrefix: "captureError");
528
529 return false;
530}
531
532bool QGstreamerMediaCaptureSession::processBusMessageLatency(const QGstreamerMessage &)
533{
534 capturePipeline.recalculateLatency();
535 return false;
536}
537
538QT_END_NAMESPACE
539

Provided by KDAB

Privacy Policy
Learn Advanced QML with KDAB
Find out more

source code of qtmultimedia/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapturesession.cpp