// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qgstreamermediacapture_p.h"
#include "qgstreamermediaencoder_p.h"
#include "qgstreamerimagecapture_p.h"
#include "qgstreamercamera_p.h"
#include <qgstpipeline_p.h>

#include "qgstreameraudioinput_p.h"
#include "qgstreameraudiooutput_p.h"
#include "qgstreamervideooutput_p.h"

#include <qloggingcategory.h>

QT_BEGIN_NAMESPACE
17 | |
18 | static void linkTeeToPad(QGstElement tee, QGstPad sink) |
19 | { |
20 | if (tee.isNull() || sink.isNull()) |
21 | return; |
22 | |
23 | auto source = tee.getRequestPad(name: "src_%u" ); |
24 | source.link(sink); |
25 | } |
26 | |
27 | static void unlinkTeeFromPad(QGstElement tee, QGstPad sink) |
28 | { |
29 | if (tee.isNull() || sink.isNull()) |
30 | return; |
31 | |
32 | auto source = sink.peer(); |
33 | source.unlink(sink); |
34 | |
35 | tee.releaseRequestPad(pad: source); |
36 | } |
37 | |
38 | QMaybe<QPlatformMediaCaptureSession *> QGstreamerMediaCapture::create() |
39 | { |
40 | auto videoOutput = QGstreamerVideoOutput::create(); |
41 | if (!videoOutput) |
42 | return videoOutput.error(); |
43 | |
44 | return new QGstreamerMediaCapture(videoOutput.value()); |
45 | } |
46 | |
47 | QGstreamerMediaCapture::QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutput) |
48 | : gstPipeline("pipeline" ), gstVideoOutput(videoOutput) |
49 | { |
50 | gstVideoOutput->setParent(this); |
51 | gstVideoOutput->setIsPreview(); |
52 | gstVideoOutput->setPipeline(gstPipeline); |
53 | |
54 | // Use system clock to drive all elements in the pipeline. Otherwise, |
55 | // the clock is sourced from the elements (e.g. from an audio source). |
56 | // Since the elements are added and removed dynamically the clock would |
57 | // also change causing lost of synchronization in the pipeline. |
58 | gst_pipeline_use_clock(pipeline: gstPipeline.pipeline(), clock: gst_system_clock_obtain()); |
59 | |
60 | // This is the recording pipeline with only live sources, thus the pipeline |
61 | // will be always in the playing state. |
62 | gstPipeline.setState(GST_STATE_PLAYING); |
63 | gstPipeline.setInStoppedState(false); |
64 | |
65 | gstPipeline.dumpGraph(fileName: "initial" ); |
66 | } |
67 | |
68 | QGstreamerMediaCapture::~QGstreamerMediaCapture() |
69 | { |
70 | setMediaRecorder(nullptr); |
71 | setImageCapture(nullptr); |
72 | setCamera(nullptr); |
73 | gstPipeline.setStateSync(GST_STATE_NULL); |
74 | } |
75 | |
76 | QPlatformCamera *QGstreamerMediaCapture::camera() |
77 | { |
78 | return gstCamera; |
79 | } |
80 | |
81 | void QGstreamerMediaCapture::setCamera(QPlatformCamera *camera) |
82 | { |
83 | QGstreamerCamera *control = static_cast<QGstreamerCamera *>(camera); |
84 | if (gstCamera == control) |
85 | return; |
86 | |
87 | if (gstCamera) { |
88 | unlinkTeeFromPad(tee: gstVideoTee, sink: encoderVideoSink); |
89 | unlinkTeeFromPad(tee: gstVideoTee, sink: imageCaptureSink); |
90 | |
91 | auto camera = gstCamera->gstElement(); |
92 | |
93 | gstPipeline.remove(element: camera); |
94 | gstPipeline.remove(element: gstVideoTee); |
95 | gstPipeline.remove(element: gstVideoOutput->gstElement()); |
96 | |
97 | camera.setStateSync(GST_STATE_NULL); |
98 | gstVideoTee.setStateSync(GST_STATE_NULL); |
99 | gstVideoOutput->gstElement().setStateSync(GST_STATE_NULL); |
100 | |
101 | gstVideoTee = {}; |
102 | gstCamera->setCaptureSession(nullptr); |
103 | } |
104 | |
105 | gstCamera = control; |
106 | if (gstCamera) { |
107 | QGstElement camera = gstCamera->gstElement(); |
108 | gstVideoTee = QGstElement("tee" , "videotee" ); |
109 | gstVideoTee.set(property: "allow-not-linked" , b: true); |
110 | |
111 | gstPipeline.add(e1: gstVideoOutput->gstElement(), e2: camera, e3: gstVideoTee); |
112 | |
113 | linkTeeToPad(tee: gstVideoTee, sink: encoderVideoSink); |
114 | linkTeeToPad(tee: gstVideoTee, sink: gstVideoOutput->gstElement().staticPad(name: "sink" )); |
115 | linkTeeToPad(tee: gstVideoTee, sink: imageCaptureSink); |
116 | |
117 | camera.link(next: gstVideoTee); |
118 | |
119 | gstVideoOutput->gstElement().setState(GST_STATE_PLAYING); |
120 | gstVideoTee.setState(GST_STATE_PLAYING); |
121 | camera.setState(GST_STATE_PLAYING); |
122 | } |
123 | |
124 | gstPipeline.dumpGraph(fileName: "camera" ); |
125 | |
126 | emit cameraChanged(); |
127 | } |
128 | |
129 | QPlatformImageCapture *QGstreamerMediaCapture::imageCapture() |
130 | { |
131 | return m_imageCapture; |
132 | } |
133 | |
134 | void QGstreamerMediaCapture::setImageCapture(QPlatformImageCapture *imageCapture) |
135 | { |
136 | QGstreamerImageCapture *control = static_cast<QGstreamerImageCapture *>(imageCapture); |
137 | if (m_imageCapture == control) |
138 | return; |
139 | |
140 | if (m_imageCapture) { |
141 | unlinkTeeFromPad(tee: gstVideoTee, sink: imageCaptureSink); |
142 | gstPipeline.remove(element: m_imageCapture->gstElement()); |
143 | m_imageCapture->gstElement().setStateSync(GST_STATE_NULL); |
144 | imageCaptureSink = {}; |
145 | m_imageCapture->setCaptureSession(nullptr); |
146 | } |
147 | |
148 | m_imageCapture = control; |
149 | if (m_imageCapture) { |
150 | imageCaptureSink = m_imageCapture->gstElement().staticPad(name: "sink" ); |
151 | m_imageCapture->gstElement().setState(GST_STATE_PLAYING); |
152 | gstPipeline.add(element: m_imageCapture->gstElement()); |
153 | linkTeeToPad(tee: gstVideoTee, sink: imageCaptureSink); |
154 | m_imageCapture->setCaptureSession(this); |
155 | } |
156 | |
157 | gstPipeline.dumpGraph(fileName: "imageCapture" ); |
158 | |
159 | emit imageCaptureChanged(); |
160 | } |
161 | |
162 | void QGstreamerMediaCapture::setMediaRecorder(QPlatformMediaRecorder *recorder) |
163 | { |
164 | QGstreamerMediaEncoder *control = static_cast<QGstreamerMediaEncoder *>(recorder); |
165 | if (m_mediaEncoder == control) |
166 | return; |
167 | |
168 | if (m_mediaEncoder) |
169 | m_mediaEncoder->setCaptureSession(nullptr); |
170 | m_mediaEncoder = control; |
171 | if (m_mediaEncoder) |
172 | m_mediaEncoder->setCaptureSession(this); |
173 | |
174 | emit encoderChanged(); |
175 | gstPipeline.dumpGraph(fileName: "encoder" ); |
176 | } |
177 | |
178 | QPlatformMediaRecorder *QGstreamerMediaCapture::mediaRecorder() |
179 | { |
180 | return m_mediaEncoder; |
181 | } |
182 | |
183 | void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink) |
184 | { |
185 | if (!gstVideoTee.isNull() && !videoSink.isNull()) { |
186 | auto caps = gst_pad_get_current_caps(pad: gstVideoTee.sink().pad()); |
187 | |
188 | encoderVideoCapsFilter = QGstElement("capsfilter" , "encoderVideoCapsFilter" ); |
189 | Q_ASSERT(encoderVideoCapsFilter); |
190 | encoderVideoCapsFilter.set(property: "caps" , c: QGstCaps(caps, QGstCaps::HasRef)); |
191 | |
192 | gstPipeline.add(element: encoderVideoCapsFilter); |
193 | |
194 | encoderVideoCapsFilter.src().link(sink: videoSink); |
195 | linkTeeToPad(tee: gstVideoTee, sink: encoderVideoCapsFilter.sink()); |
196 | encoderVideoCapsFilter.setState(GST_STATE_PLAYING); |
197 | encoderVideoSink = encoderVideoCapsFilter.sink(); |
198 | } |
199 | |
200 | if (!gstAudioTee.isNull() && !audioSink.isNull()) { |
201 | auto caps = gst_pad_get_current_caps(pad: gstAudioTee.sink().pad()); |
202 | |
203 | encoderAudioCapsFilter = QGstElement("capsfilter" , "encoderAudioCapsFilter" ); |
204 | Q_ASSERT(encoderAudioCapsFilter); |
205 | encoderAudioCapsFilter.set(property: "caps" , c: QGstCaps(caps, QGstCaps::HasRef)); |
206 | |
207 | gstPipeline.add(element: encoderAudioCapsFilter); |
208 | |
209 | encoderAudioCapsFilter.src().link(sink: audioSink); |
210 | linkTeeToPad(tee: gstAudioTee, sink: encoderAudioCapsFilter.sink()); |
211 | encoderAudioCapsFilter.setState(GST_STATE_PLAYING); |
212 | encoderAudioSink = encoderAudioCapsFilter.sink(); |
213 | } |
214 | } |
215 | |
216 | void QGstreamerMediaCapture::unlinkEncoder() |
217 | { |
218 | if (!encoderVideoCapsFilter.isNull()) { |
219 | encoderVideoCapsFilter.src().unlinkPeer(); |
220 | unlinkTeeFromPad(tee: gstVideoTee, sink: encoderVideoCapsFilter.sink()); |
221 | gstPipeline.remove(element: encoderVideoCapsFilter); |
222 | encoderVideoCapsFilter.setStateSync(GST_STATE_NULL); |
223 | encoderVideoCapsFilter = {}; |
224 | } |
225 | |
226 | if (!encoderAudioCapsFilter.isNull()) { |
227 | encoderAudioCapsFilter.src().unlinkPeer(); |
228 | unlinkTeeFromPad(tee: gstAudioTee, sink: encoderAudioCapsFilter.sink()); |
229 | gstPipeline.remove(element: encoderAudioCapsFilter); |
230 | encoderAudioCapsFilter.setStateSync(GST_STATE_NULL); |
231 | encoderAudioCapsFilter = {}; |
232 | } |
233 | |
234 | encoderAudioSink = {}; |
235 | encoderVideoSink = {}; |
236 | } |
237 | |
238 | void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input) |
239 | { |
240 | if (gstAudioInput == input) |
241 | return; |
242 | |
243 | if (gstAudioInput) { |
244 | unlinkTeeFromPad(tee: gstAudioTee, sink: encoderAudioSink); |
245 | |
246 | if (gstAudioOutput) { |
247 | unlinkTeeFromPad(tee: gstAudioTee, sink: gstAudioOutput->gstElement().staticPad(name: "sink" )); |
248 | gstPipeline.remove(element: gstAudioOutput->gstElement()); |
249 | gstAudioOutput->gstElement().setStateSync(GST_STATE_NULL); |
250 | } |
251 | |
252 | gstPipeline.remove(element: gstAudioInput->gstElement()); |
253 | gstPipeline.remove(element: gstAudioTee); |
254 | gstAudioInput->gstElement().setStateSync(GST_STATE_NULL); |
255 | gstAudioTee.setStateSync(GST_STATE_NULL); |
256 | gstAudioTee = {}; |
257 | } |
258 | |
259 | gstAudioInput = static_cast<QGstreamerAudioInput *>(input); |
260 | if (gstAudioInput) { |
261 | Q_ASSERT(gstAudioTee.isNull()); |
262 | gstAudioTee = QGstElement("tee" , "audiotee" ); |
263 | gstAudioTee.set(property: "allow-not-linked" , b: true); |
264 | gstPipeline.add(e1: gstAudioInput->gstElement(), e2: gstAudioTee); |
265 | gstAudioInput->gstElement().link(next: gstAudioTee); |
266 | |
267 | if (gstAudioOutput) { |
268 | gstPipeline.add(element: gstAudioOutput->gstElement()); |
269 | gstAudioOutput->gstElement().setState(GST_STATE_PLAYING); |
270 | linkTeeToPad(tee: gstAudioTee, sink: gstAudioOutput->gstElement().staticPad(name: "sink" )); |
271 | } |
272 | |
273 | gstAudioTee.setState(GST_STATE_PLAYING); |
274 | gstAudioInput->gstElement().setStateSync(GST_STATE_PLAYING); |
275 | |
276 | linkTeeToPad(tee: gstAudioTee, sink: encoderAudioSink); |
277 | } |
278 | } |
279 | |
280 | void QGstreamerMediaCapture::setVideoPreview(QVideoSink *sink) |
281 | { |
282 | gstVideoOutput->setVideoSink(sink); |
283 | } |
284 | |
285 | void QGstreamerMediaCapture::setAudioOutput(QPlatformAudioOutput *output) |
286 | { |
287 | if (gstAudioOutput == output) |
288 | return; |
289 | |
290 | if (gstAudioOutput && gstAudioInput) { |
291 | // If audio input is set, the output is in the pipeline |
292 | unlinkTeeFromPad(tee: gstAudioTee, sink: gstAudioOutput->gstElement().staticPad(name: "sink" )); |
293 | gstPipeline.remove(element: gstAudioOutput->gstElement()); |
294 | gstAudioOutput->gstElement().setStateSync(GST_STATE_NULL); |
295 | } |
296 | |
297 | gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output); |
298 | if (gstAudioOutput && gstAudioInput) { |
299 | gstPipeline.add(element: gstAudioOutput->gstElement()); |
300 | gstAudioOutput->gstElement().setState(GST_STATE_PLAYING); |
301 | linkTeeToPad(tee: gstAudioTee, sink: gstAudioOutput->gstElement().staticPad(name: "sink" )); |
302 | } |
303 | } |
304 | |
305 | QGstreamerVideoSink *QGstreamerMediaCapture::gstreamerVideoSink() const |
306 | { |
307 | return gstVideoOutput ? gstVideoOutput->gstreamerVideoSink() : nullptr; |
308 | } |
309 | |

QT_END_NAMESPACE

#include "moc_qgstreamermediacapture_p.cpp"