| 1 | // Copyright (C) 2016 The Qt Company Ltd. | 
| 2 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only | 
| 3 |  | 
| 4 | #include <mediacapture/qgstreamermediacapturesession_p.h> | 
| 5 | #include <mediacapture/qgstreamermediarecorder_p.h> | 
| 6 | #include <mediacapture/qgstreamerimagecapture_p.h> | 
| 7 | #include <mediacapture/qgstreamercamera_p.h> | 
| 8 | #include <common/qgstpipeline_p.h> | 
| 9 | #include <common/qgstreameraudioinput_p.h> | 
| 10 | #include <common/qgstreameraudiooutput_p.h> | 
| 11 | #include <common/qgstreamervideooutput_p.h> | 
| 12 | #include <common/qgst_debug_p.h> | 
| 13 |  | 
| 14 | #include <QtCore/qloggingcategory.h> | 
| 15 | #include <QtCore/private/quniquehandle_p.h> | 
| 16 |  | 
| 17 | QT_BEGIN_NAMESPACE | 
| 18 |  | 
| 19 | namespace { | 
| 20 |  | 
| 21 | QGstElement makeTee(const char *name) | 
| 22 | { | 
| 23 |     QGstElement tee = QGstElement::createFromFactory(factory: "tee" , name); | 
| 24 |     tee.set(property: "allow-not-linked" , b: true); | 
| 25 |     return tee; | 
| 26 | } | 
| 27 |  | 
| 28 | template <typename Functor> | 
| 29 | void executeWhilePadsAreIdle(QSpan<QGstPad> pads, Functor &&f) | 
| 30 | { | 
| 31 |     if (pads.isEmpty()) | 
| 32 |         f(); | 
| 33 |  | 
| 34 |     if (!pads.front()) | 
| 35 |         return executeWhilePadsAreIdle(pads.subspan(pos: 1), f); | 
| 36 |  | 
| 37 |     if (pads.size() == 1) | 
| 38 |         pads.front().modifyPipelineInIdleProbe(f); | 
| 39 |     else { | 
| 40 |         auto remain = pads.subspan(pos: 1); | 
| 41 |         pads.front().modifyPipelineInIdleProbe([&] { | 
| 42 |             executeWhilePadsAreIdle(remain, f); | 
| 43 |         }); | 
| 44 |     } | 
| 45 | } | 
| 46 |  | 
| 47 | void setStateOnElements(QSpan<const QGstElement> elements, GstState state) | 
| 48 | { | 
| 49 |     for (QGstElement element : elements) | 
| 50 |         if (element) | 
| 51 |             element.setState(state); | 
| 52 | } | 
| 53 |  | 
| 54 | void finishStateChangeOnElements(QSpan<const QGstElement> elements) | 
| 55 | { | 
| 56 |     for (QGstElement element : elements) | 
| 57 |         if (element) | 
| 58 |             element.finishStateChange(); | 
| 59 | } | 
| 60 |  | 
| 61 | } // namespace | 
| 62 |  | 
| 63 | QMaybe<QPlatformMediaCaptureSession *> QGstreamerMediaCaptureSession::create() | 
| 64 | { | 
| 65 |     auto videoOutput = QGstreamerVideoOutput::create(); | 
| 66 |     if (!videoOutput) | 
| 67 |         return videoOutput.error(); | 
| 68 |  | 
| 69 |     static const auto error = qGstErrorMessageIfElementsNotAvailable(arg: "tee" , args: "capsfilter" ); | 
| 70 |     if (error) | 
| 71 |         return *error; | 
| 72 |  | 
| 73 |     return new QGstreamerMediaCaptureSession(videoOutput.value()); | 
| 74 | } | 
| 75 |  | 
QGstreamerMediaCaptureSession::QGstreamerMediaCaptureSession(QGstreamerVideoOutput *videoOutput)
    : capturePipeline{
          QGstPipeline::create("mediaCapturePipeline"),
      },
      gstAudioTee{
          makeTee("audioTee"),
      },
      // Request pads are allocated once up front so branches can be linked
      // and unlinked repeatedly without re-requesting pads from the tees.
      audioSrcPadForEncoder{ gstAudioTee.getRequestPad("src_%u") },
      audioSrcPadForOutput{ gstAudioTee.getRequestPad("src_%u") },
      gstVideoTee{
          makeTee("videoTee"),
      },
      videoSrcPadForEncoder{ gstVideoTee.getRequestPad("src_%u") },
      videoSrcPadForOutput{ gstVideoTee.getRequestPad("src_%u") },
      videoSrcPadForImageCapture{ gstVideoTee.getRequestPad("src_%u") },
      gstVideoOutput(videoOutput)
{
    gstVideoOutput->setParent(this);
    gstVideoOutput->setIsPreview();

    // Receive bus messages (errors, latency, ...) and forward messages from
    // nested bins so they are observable on this pipeline's bus.
    capturePipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    capturePipeline.set("message-forward", true);

    // Use system clock to drive all elements in the pipeline. Otherwise,
    // the clock is sourced from the elements (e.g. from an audio source).
    // Since the elements are added and removed dynamically the clock would
    // also change causing loss of synchronization in the pipeline.
    QGstClockHandle systemClock{
        gst_system_clock_obtain(),
        QGstClockHandle::HasRef,
    };
    gst_pipeline_use_clock(capturePipeline.pipeline(), systemClock.get());

    // This is the recording pipeline with only live sources, thus the pipeline
    // will be always in the playing state.
    capturePipeline.setState(GST_STATE_PLAYING);
    gstVideoOutput->setActive(true);

    capturePipeline.dumpGraph("initial");
}
| 117 |  | 
| 118 | QGstPad QGstreamerMediaCaptureSession::imageCaptureSink() | 
| 119 | { | 
| 120 |     return m_imageCapture ? m_imageCapture->gstElement().staticPad(name: "sink" ) : QGstPad{}; | 
| 121 | } | 
| 122 |  | 
| 123 | QGstPad QGstreamerMediaCaptureSession::videoOutputSink() | 
| 124 | { | 
| 125 |     return gstVideoOutput ? gstVideoOutput->gstElement().staticPad(name: "sink" ) : QGstPad{}; | 
| 126 | } | 
| 127 |  | 
| 128 | QGstPad QGstreamerMediaCaptureSession::audioOutputSink() | 
| 129 | { | 
| 130 |     return gstAudioOutput ? gstAudioOutput->gstElement().staticPad(name: "sink" ) : QGstPad{}; | 
| 131 | } | 
| 132 |  | 
QGstreamerMediaCaptureSession::~QGstreamerMediaCaptureSession()
{
    // Detach recorder, image capture and camera first so their elements are
    // unlinked and removed from the pipeline while pads are idle.
    setMediaRecorder(nullptr);
    setImageCapture(nullptr);
    setCamera(nullptr);
    capturePipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    // Shut down synchronously, stepping through READY before NULL.
    capturePipeline.setStateSync(GST_STATE_READY);
    capturePipeline.setStateSync(GST_STATE_NULL);
}
| 142 |  | 
| 143 | QPlatformCamera *QGstreamerMediaCaptureSession::camera() | 
| 144 | { | 
| 145 |     return gstCamera; | 
| 146 | } | 
| 147 |  | 
| 148 | void QGstreamerMediaCaptureSession::setCamera(QPlatformCamera *platformCamera) | 
| 149 | { | 
| 150 |     auto *camera = static_cast<QGstreamerCameraBase *>(platformCamera); | 
| 151 |     if (gstCamera == camera) | 
| 152 |         return; | 
| 153 |  | 
| 154 |     if (gstCamera) { | 
| 155 |         QObject::disconnect(gstCameraActiveConnection); | 
| 156 |         if (gstVideoTee) | 
| 157 |             setCameraActive(false); | 
| 158 |     } | 
| 159 |  | 
| 160 |     gstCamera = camera; | 
| 161 |  | 
| 162 |     if (gstCamera) { | 
| 163 |         gstCameraActiveConnection = | 
| 164 |                 QObject::connect(sender: camera, signal: &QPlatformCamera::activeChanged, context: this, | 
| 165 |                                  slot: &QGstreamerMediaCaptureSession::setCameraActive); | 
| 166 |         if (gstCamera->isActive()) | 
| 167 |             setCameraActive(true); | 
| 168 |     } | 
| 169 |  | 
| 170 |     emit cameraChanged(); | 
| 171 | } | 
| 172 |  | 
| 173 | void QGstreamerMediaCaptureSession::setCameraActive(bool activate) | 
| 174 | { | 
| 175 |     std::array padsToSync = { | 
| 176 |         videoSrcPadForEncoder, | 
| 177 |         videoSrcPadForImageCapture, | 
| 178 |         videoSrcPadForOutput, | 
| 179 |         gstVideoTee.sink(), | 
| 180 |     }; | 
| 181 |  | 
| 182 |     QGstElement cameraElement = gstCamera->gstElement(); | 
| 183 |     QGstElement videoOutputElement = gstVideoOutput->gstElement(); | 
| 184 |  | 
| 185 |     if (activate) { | 
| 186 |         gstCamera->setCaptureSession(this); | 
| 187 |         capturePipeline.add(ts: gstVideoTee); | 
| 188 |  | 
| 189 |         executeWhilePadsAreIdle(pads: padsToSync, f: [&] { | 
| 190 |             capturePipeline.add(ts: cameraElement); | 
| 191 |             if (videoOutputElement) | 
| 192 |                 capturePipeline.add(ts: videoOutputElement); | 
| 193 |  | 
| 194 |             if (m_currentRecorderState && m_currentRecorderState->videoSink) | 
| 195 |                 videoSrcPadForEncoder.link(sink: m_currentRecorderState->videoSink); | 
| 196 |             if (videoOutputElement) | 
| 197 |                 videoSrcPadForOutput.link(sink: videoOutputSink()); | 
| 198 |             if (m_imageCapture) | 
| 199 |                 videoSrcPadForImageCapture.link(sink: imageCaptureSink()); | 
| 200 |  | 
| 201 |             qLinkGstElements(ts: cameraElement, ts: gstVideoTee); | 
| 202 |  | 
| 203 |             setStateOnElements(elements: { gstVideoTee, cameraElement, videoOutputElement }, | 
| 204 |                                state: GST_STATE_PLAYING); | 
| 205 |         }); | 
| 206 |  | 
| 207 |         finishStateChangeOnElements(elements: { gstVideoTee, cameraElement, videoOutputElement }); | 
| 208 |  | 
| 209 |         for (QGstElement addedElement : { gstVideoTee, cameraElement, videoOutputElement }) | 
| 210 |             addedElement.finishStateChange(); | 
| 211 |  | 
| 212 |     } else { | 
| 213 |         executeWhilePadsAreIdle(pads: padsToSync, f: [&] { | 
| 214 |             for (QGstPad &pad : padsToSync) | 
| 215 |                 pad.unlinkPeer(); | 
| 216 |         }); | 
| 217 |         capturePipeline.stopAndRemoveElements(ts&: cameraElement, ts&: gstVideoTee, ts&: videoOutputElement); | 
| 218 |  | 
| 219 |         gstCamera->setCaptureSession(nullptr); | 
| 220 |     } | 
| 221 |  | 
| 222 |     capturePipeline.dumpGraph(fileNamePrefix: "camera" ); | 
| 223 | } | 
| 224 |  | 
| 225 | QPlatformImageCapture *QGstreamerMediaCaptureSession::imageCapture() | 
| 226 | { | 
| 227 |     return m_imageCapture; | 
| 228 | } | 
| 229 |  | 
| 230 | void QGstreamerMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture) | 
| 231 | { | 
| 232 |     QGstreamerImageCapture *control = static_cast<QGstreamerImageCapture *>(imageCapture); | 
| 233 |     if (m_imageCapture == control) | 
| 234 |         return; | 
| 235 |  | 
| 236 |     videoSrcPadForEncoder.modifyPipelineInIdleProbe(f: [&] { | 
| 237 |         if (m_imageCapture) { | 
| 238 |             qUnlinkGstElements(ts: gstVideoTee, ts: m_imageCapture->gstElement()); | 
| 239 |             capturePipeline.stopAndRemoveElements(ts: m_imageCapture->gstElement()); | 
| 240 |             m_imageCapture->setCaptureSession(nullptr); | 
| 241 |         } | 
| 242 |  | 
| 243 |         m_imageCapture = control; | 
| 244 |  | 
| 245 |         if (m_imageCapture) { | 
| 246 |             capturePipeline.add(ts: m_imageCapture->gstElement()); | 
| 247 |             videoSrcPadForImageCapture.link(sink: imageCaptureSink()); | 
| 248 |             m_imageCapture->setCaptureSession(this); | 
| 249 |             m_imageCapture->gstElement().setState(GST_STATE_PLAYING); | 
| 250 |         } | 
| 251 |     }); | 
| 252 |     if (m_imageCapture) | 
| 253 |         m_imageCapture->gstElement().finishStateChange(); | 
| 254 |  | 
| 255 |     capturePipeline.dumpGraph(fileNamePrefix: "imageCapture" ); | 
| 256 |  | 
| 257 |     emit imageCaptureChanged(); | 
| 258 | } | 
| 259 |  | 
| 260 | void QGstreamerMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder) | 
| 261 | { | 
| 262 |     QGstreamerMediaRecorder *control = static_cast<QGstreamerMediaRecorder *>(recorder); | 
| 263 |     if (m_mediaRecorder == control) | 
| 264 |         return; | 
| 265 |  | 
| 266 |     if (m_mediaRecorder) | 
| 267 |         m_mediaRecorder->setCaptureSession(nullptr); | 
| 268 |     m_mediaRecorder = control; | 
| 269 |     if (m_mediaRecorder) | 
| 270 |         m_mediaRecorder->setCaptureSession(this); | 
| 271 |  | 
| 272 |     emit encoderChanged(); | 
| 273 |     capturePipeline.dumpGraph(fileNamePrefix: "encoder" ); | 
| 274 | } | 
| 275 |  | 
| 276 | QPlatformMediaRecorder *QGstreamerMediaCaptureSession::mediaRecorder() | 
| 277 | { | 
| 278 |     return m_mediaRecorder; | 
| 279 | } | 
| 280 |  | 
// Wires the recorder's encodebin/filesink into the pipeline and starts them.
// Both encoder branches of the tees are blocked with idle probes while the
// topology changes; asynchronous state changes are completed afterwards,
// outside the probes.
void QGstreamerMediaCaptureSession::linkAndStartEncoder(RecorderElements recorder,
                                                        const QMediaMetaData &metadata)
{
    Q_ASSERT(!m_currentRecorderState);

    std::array padsToSync = {
        audioSrcPadForEncoder,
        videoSrcPadForEncoder,
    };

    executeWhilePadsAreIdle(padsToSync, [&] {
        capturePipeline.add(recorder.encodeBin, recorder.fileSink);
        qLinkGstElements(recorder.encodeBin, recorder.fileSink);

        applyMetaDataToTagSetter(metadata, recorder.encodeBin);

        if (recorder.videoSink) {
            // Pin the encoder branch to the caps the camera currently
            // produces, so recording cannot trigger a renegotiation.
            QGstCaps capsFromCamera = gstVideoTee.sink().currentCaps();

            encoderVideoCapsFilter =
                    QGstElement::createFromFactory("capsfilter", "encoderVideoCapsFilter");
            encoderVideoCapsFilter.set("caps", capsFromCamera);

            capturePipeline.add(encoderVideoCapsFilter);
            encoderVideoCapsFilter.src().link(recorder.videoSink);
            videoSrcPadForEncoder.link(encoderVideoCapsFilter.sink());
        }

        if (recorder.audioSink) {
            // Same for audio: fix the caps to the current input format.
            QGstCaps capsFromInput = gstAudioTee.sink().currentCaps();

            encoderAudioCapsFilter =
                    QGstElement::createFromFactory("capsfilter", "encoderAudioCapsFilter");

            encoderAudioCapsFilter.set("caps", capsFromInput);

            capturePipeline.add(encoderAudioCapsFilter);

            encoderAudioCapsFilter.src().link(recorder.audioSink);
            audioSrcPadForEncoder.link(encoderAudioCapsFilter.sink());
        }
        setStateOnElements({ recorder.encodeBin, recorder.fileSink, encoderVideoCapsFilter,
                             encoderAudioCapsFilter },
                           GST_STATE_PLAYING);

        // Ask the sink to reconfigure now that it is linked.
        GstEvent *event = gst_event_new_reconfigure();
        gst_element_send_event(recorder.fileSink.element(), event);
    });

    // Must happen outside the idle probes to avoid deadlocks.
    finishStateChangeOnElements({ recorder.encodeBin, recorder.fileSink, encoderVideoCapsFilter,
                                  encoderAudioCapsFilter });

    m_currentRecorderState = std::move(recorder);
}
| 335 |  | 
// Detaches the encoder branches from the tees and sends EOS into the
// encodebin so it can drain and finalize the recording; the elements are
// removed later in finalizeRecorder(), after EOS has propagated.
void QGstreamerMediaCaptureSession::unlinkRecorder()
{
    std::array padsToSync = {
        audioSrcPadForEncoder,
        videoSrcPadForEncoder,
    };

    // Unlink while the encoder branches are idle so no buffer is dropped
    // mid-transfer.
    executeWhilePadsAreIdle(padsToSync, [&] {
        if (encoderVideoCapsFilter)
            qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);

        if (encoderAudioCapsFilter)
            qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
    });

    if (encoderVideoCapsFilter) {
        capturePipeline.stopAndRemoveElements(encoderVideoCapsFilter);
        encoderVideoCapsFilter = {};
    }

    if (encoderAudioCapsFilter) {
        capturePipeline.stopAndRemoveElements(encoderAudioCapsFilter);
        encoderAudioCapsFilter = {};
    }

    // Drain the encodebin; finalizeRecorder() runs once EOS reaches the sink.
    m_currentRecorderState->encodeBin.sendEos();
}
| 363 |  | 
// Final teardown of the recorder elements after the EOS sent by
// unlinkRecorder() has been processed.
void QGstreamerMediaCaptureSession::finalizeRecorder()
{
    capturePipeline.stopAndRemoveElements(m_currentRecorderState->encodeBin,
                                          m_currentRecorderState->fileSink);

    m_currentRecorderState = std::nullopt;
}
| 371 |  | 
| 372 | const QGstPipeline &QGstreamerMediaCaptureSession::pipeline() const | 
| 373 | { | 
| 374 |     return capturePipeline; | 
| 375 | } | 
| 376 |  | 
| 377 | void QGstreamerMediaCaptureSession::setAudioInput(QPlatformAudioInput *input) | 
| 378 | { | 
| 379 |     if (gstAudioInput == input) | 
| 380 |         return; | 
| 381 |  | 
| 382 |     if (input && !gstAudioInput) { | 
| 383 |         // a new input is connected, we need to add/link the audio tee and audio tee | 
| 384 |  | 
| 385 |         capturePipeline.add(ts: gstAudioTee); | 
| 386 |  | 
| 387 |         std::array padsToSync = { | 
| 388 |             audioSrcPadForEncoder, | 
| 389 |             audioSrcPadForOutput, | 
| 390 |             gstAudioTee.sink(), | 
| 391 |         }; | 
| 392 |  | 
| 393 |         executeWhilePadsAreIdle(pads: padsToSync, f: [&] { | 
| 394 |             if (m_currentRecorderState && m_currentRecorderState->audioSink) | 
| 395 |                 audioSrcPadForEncoder.link(sink: m_currentRecorderState->audioSink); | 
| 396 |             if (gstAudioOutput) { | 
| 397 |                 capturePipeline.add(ts: gstAudioOutput->gstElement()); | 
| 398 |                 audioSrcPadForOutput.link(sink: audioOutputSink()); | 
| 399 |             } | 
| 400 |  | 
| 401 |             gstAudioInput = static_cast<QGstreamerAudioInput *>(input); | 
| 402 |             capturePipeline.add(ts: gstAudioInput->gstElement()); | 
| 403 |  | 
| 404 |             qLinkGstElements(ts: gstAudioInput->gstElement(), ts: gstAudioTee); | 
| 405 |  | 
| 406 |             gstAudioTee.setState(GST_STATE_PLAYING); | 
| 407 |             if (gstAudioOutput) | 
| 408 |                 gstAudioOutput->gstElement().setState(GST_STATE_PLAYING); | 
| 409 |             gstAudioInput->gstElement().setState(GST_STATE_PLAYING); | 
| 410 |         }); | 
| 411 |  | 
| 412 |     } else if (!input && gstAudioInput) { | 
| 413 |         // input has been removed, unlink and remove audio output and audio tee | 
| 414 |  | 
| 415 |         std::array padsToSync = { | 
| 416 |             audioSrcPadForEncoder, | 
| 417 |             audioSrcPadForOutput, | 
| 418 |             gstAudioTee.sink(), | 
| 419 |         }; | 
| 420 |  | 
| 421 |         executeWhilePadsAreIdle(pads: padsToSync, f: [&] { | 
| 422 |             for (QGstPad &pad : padsToSync) | 
| 423 |                 pad.unlinkPeer(); | 
| 424 |         }); | 
| 425 |  | 
| 426 |         capturePipeline.stopAndRemoveElements(ts&: gstAudioTee); | 
| 427 |         if (gstAudioOutput) | 
| 428 |             capturePipeline.stopAndRemoveElements(ts: gstAudioOutput->gstElement()); | 
| 429 |         capturePipeline.stopAndRemoveElements(ts: gstAudioInput->gstElement()); | 
| 430 |  | 
| 431 |         gstAudioInput = nullptr; | 
| 432 |     } else { | 
| 433 |         QGstElement oldInputElement = gstAudioInput->gstElement(); | 
| 434 |  | 
| 435 |         gstAudioTee.sink().modifyPipelineInIdleProbe(f: [&] { | 
| 436 |             oldInputElement.sink().unlinkPeer(); | 
| 437 |             gstAudioInput = static_cast<QGstreamerAudioInput *>(input); | 
| 438 |             capturePipeline.add(ts: gstAudioInput->gstElement()); | 
| 439 |  | 
| 440 |             qLinkGstElements(ts: gstAudioInput->gstElement(), ts: gstAudioTee); | 
| 441 |  | 
| 442 |             gstAudioInput->gstElement().setState(GST_STATE_PLAYING); | 
| 443 |         }); | 
| 444 |  | 
| 445 |         gstAudioInput->gstElement().finishStateChange(); | 
| 446 |  | 
| 447 |         capturePipeline.stopAndRemoveElements(ts: gstAudioInput->gstElement()); | 
| 448 |     } | 
| 449 | } | 
| 450 |  | 
| 451 | void QGstreamerMediaCaptureSession::setVideoPreview(QVideoSink *sink) | 
| 452 | { | 
| 453 |     auto *gstSink = sink ? static_cast<QGstreamerVideoSink *>(sink->platformVideoSink()) : nullptr; | 
| 454 |     if (gstSink) | 
| 455 |         gstSink->setAsync(false); | 
| 456 |  | 
| 457 |     gstVideoOutput->setVideoSink(sink); | 
| 458 |     capturePipeline.dumpGraph(fileNamePrefix: "setVideoPreview" ); | 
| 459 | } | 
| 460 |  | 
// Replaces the audio (monitor) output. When no audio input exists the tee is
// not in the pipeline, so only the pointer is updated; otherwise the output
// element is swapped on the tee's output branch under an idle probe.
void QGstreamerMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
{
    if (gstAudioOutput == output)
        return;

    auto *gstOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstOutput)
        gstOutput->setAsync(false);

    if (!gstAudioInput) {
        // audio output is not active, since there is no audio input
        gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
    } else {
        QGstElement oldOutputElement =
                gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
        gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);

        audioSrcPadForOutput.modifyPipelineInIdleProbe([&] {
            if (oldOutputElement)
                oldOutputElement.sink().unlinkPeer();

            if (gstAudioOutput) {
                capturePipeline.add(gstAudioOutput->gstElement());
                audioSrcPadForOutput.link(gstAudioOutput->gstElement().staticPad("sink"));
                gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
            }
        });

        // Complete the async state change outside the idle probe.
        if (gstAudioOutput)
            gstAudioOutput->gstElement().finishStateChange();

        if (oldOutputElement)
            capturePipeline.stopAndRemoveElements(oldOutputElement);
    }
}
| 496 |  | 
| 497 | QGstreamerVideoSink *QGstreamerMediaCaptureSession::gstreamerVideoSink() const | 
| 498 | { | 
| 499 |     return gstVideoOutput ? gstVideoOutput->gstreamerVideoSink() : nullptr; | 
| 500 | } | 
| 501 |  | 
| 502 | bool QGstreamerMediaCaptureSession::processBusMessage(const QGstreamerMessage &msg) | 
| 503 | { | 
| 504 |     if (m_mediaRecorder) | 
| 505 |         m_mediaRecorder->processBusMessage(message: msg); | 
| 506 |  | 
| 507 |     switch (msg.type()) { | 
| 508 |     case GST_MESSAGE_ERROR: | 
| 509 |         return processBusMessageError(msg); | 
| 510 |  | 
| 511 |     case GST_MESSAGE_LATENCY: | 
| 512 |         return processBusMessageLatency(msg); | 
| 513 |  | 
| 514 |     default: | 
| 515 |         break; | 
| 516 |     } | 
| 517 |  | 
| 518 |     return false; | 
| 519 | } | 
| 520 |  | 
| 521 | bool QGstreamerMediaCaptureSession::processBusMessageError(const QGstreamerMessage &msg) | 
| 522 | { | 
| 523 |     QUniqueGErrorHandle error; | 
| 524 |     QUniqueGStringHandle message; | 
| 525 |     gst_message_parse_error(message: msg.message(), gerror: &error, debug: &message); | 
| 526 |  | 
| 527 |     qWarning() << "QGstreamerMediaCapture: received error from gstreamer"  << error << message; | 
| 528 |     capturePipeline.dumpGraph(fileNamePrefix: "captureError" ); | 
| 529 |  | 
| 530 |     return false; | 
| 531 | } | 
| 532 |  | 
| 533 | bool QGstreamerMediaCaptureSession::processBusMessageLatency(const QGstreamerMessage &) | 
| 534 | { | 
| 535 |     capturePipeline.recalculateLatency(); | 
| 536 |     return false; | 
| 537 | } | 
| 538 |  | 
| 539 | QT_END_NAMESPACE | 
| 540 |  |