1 | // Copyright (C) 2024 The Qt Company Ltd. |
2 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only |
3 | #include "qffmpegvideoencoder_p.h" |
4 | #include "qffmpegmuxer_p.h" |
5 | #include "qffmpegvideobuffer_p.h" |
6 | #include "qffmpegrecordingengine_p.h" |
7 | #include "qffmpegvideoframeencoder_p.h" |
8 | #include "qffmpegrecordingengineutils_p.h" |
9 | #include "private/qvideoframe_p.h" |
10 | #include "private/qmultimediautils_p.h" |
11 | #include <QtCore/qloggingcategory.h> |
12 | |
13 | QT_BEGIN_NAMESPACE |
14 | |
15 | namespace QFFmpeg { |
16 | |
17 | static Q_LOGGING_CATEGORY(qLcFFmpegVideoEncoder, "qt.multimedia.ffmpeg.videoencoder" ); |
18 | |
// Captures the source stream parameters (size, pixel formats, transform,
// frame rate and color properties) that the frame encoder will later be
// created with, and fills in any encoder settings the user left unspecified
// from those source values.
VideoEncoder::VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoderSettings &settings,
                           const QVideoFrameFormat &format, std::optional<AVPixelFormat> hwFormat)
    : EncoderThread(recordingEngine), m_settings(settings)
{
    setObjectName(QLatin1String("VideoEncoder"));

    // Software pixel format equivalent of the source's QVideoFrameFormat.
    const AVPixelFormat swFormat = QFFmpegVideoBuffer::toAVPixelFormat(format.pixelFormat());
    qreal frameRate = format.streamFrameRate();
    if (frameRate <= 0.) {
        qWarning() << "Invalid frameRate" << frameRate << "; Using the default instead";

        // set some default frame rate since ffmpeg has UB if it's 0.
        frameRate = 30.;
    }

    m_sourceParams.size = format.frameSize();
    // Prefer the hardware pixel format when one was supplied and is valid.
    m_sourceParams.format = hwFormat && *hwFormat != AV_PIX_FMT_NONE ? *hwFormat : swFormat;
    // Temporary: check isSwPixelFormat because of android issue (QTBUG-116836)
    // TODO: assign swFormat.
    m_sourceParams.swFormat =
            isSwPixelFormat(m_sourceParams.format) ? m_sourceParams.format : swFormat;
    m_sourceParams.transform = qNormalizedSurfaceTransformation(format);
    m_sourceParams.frameRate = frameRate;
    m_sourceParams.colorTransfer = QFFmpeg::toAvColorTransfer(format.colorTransfer());
    m_sourceParams.colorSpace = QFFmpeg::toAvColorSpace(format.colorSpace());
    m_sourceParams.colorRange = QFFmpeg::toAvColorRange(format.colorRange());

    // Fall back to the source geometry and rate where the user settings are
    // unset, so the frame encoder always sees a valid configuration.
    if (!m_settings.videoResolution().isValid())
        m_settings.setVideoResolution(m_sourceParams.size);

    if (m_settings.videoFrameRate() <= 0.)
        m_settings.setVideoFrameRate(m_sourceParams.frameRate);
}
52 | |
53 | VideoEncoder::~VideoEncoder() = default; |
54 | |
55 | void VideoEncoder::addFrame(const QVideoFrame &frame) |
56 | { |
57 | if (!frame.isValid()) { |
58 | setEndOfSourceStream(); |
59 | return; |
60 | } |
61 | |
62 | { |
63 | auto guard = lockLoopData(); |
64 | |
65 | resetEndOfSourceStream(); |
66 | |
67 | if (m_paused) { |
68 | m_shouldAdjustTimeBaseForNextFrame = true; |
69 | return; |
70 | } |
71 | |
72 | // Drop frames if encoder can not keep up with the video source data rate; |
73 | // canPushFrame might be used instead |
74 | const bool queueFull = m_videoFrameQueue.size() >= m_maxQueueSize; |
75 | |
76 | if (queueFull) { |
77 | qCDebug(qLcFFmpegVideoEncoder) << "RecordingEngine frame queue full. Frame lost." ; |
78 | return; |
79 | } |
80 | |
81 | m_videoFrameQueue.push(x: { .frame: frame, .shouldAdjustTimeBase: m_shouldAdjustTimeBaseForNextFrame }); |
82 | m_shouldAdjustTimeBaseForNextFrame = false; |
83 | } |
84 | |
85 | dataReady(); |
86 | } |
87 | |
88 | VideoEncoder::FrameInfo VideoEncoder::takeFrame() |
89 | { |
90 | auto guard = lockLoopData(); |
91 | return dequeueIfPossible(queue&: m_videoFrameQueue); |
92 | } |
93 | |
94 | void VideoEncoder::retrievePackets() |
95 | { |
96 | Q_ASSERT(m_frameEncoder); |
97 | while (auto packet = m_frameEncoder->retrievePacket()) |
98 | m_recordingEngine.getMuxer()->addPacket(packet: std::move(packet)); |
99 | } |
100 | |
101 | bool VideoEncoder::init() |
102 | { |
103 | m_frameEncoder = VideoFrameEncoder::create(encoderSettings: m_settings, sourceParams: m_sourceParams, |
104 | formatContext: m_recordingEngine.avFormatContext()); |
105 | |
106 | qCDebug(qLcFFmpegVideoEncoder) << "VideoEncoder::init started video device thread." ; |
107 | if (!m_frameEncoder) { |
108 | emit m_recordingEngine.sessionError(code: QMediaRecorder::ResourceError, |
109 | description: "Could not initialize encoder" ); |
110 | return false; |
111 | } |
112 | |
113 | return EncoderThread::init(); |
114 | } |
115 | |
116 | void VideoEncoder::cleanup() |
117 | { |
118 | Q_ASSERT(m_frameEncoder); |
119 | |
120 | while (!m_videoFrameQueue.empty()) |
121 | processOne(); |
122 | |
123 | while (m_frameEncoder->sendFrame(inputFrame: nullptr) == AVERROR(EAGAIN)) |
124 | retrievePackets(); |
125 | retrievePackets(); |
126 | } |
127 | |
// True while at least one frame is waiting in the queue to be encoded.
bool VideoEncoder::hasData() const
{
    return !m_videoFrameQueue.empty();
}
132 | |
// Keeps a QVideoFrame (and, for JPEG frames, the QImage decoded from it)
// alive for as long as an AVFrame points into its pixel data; deleted by
// freeQVideoFrame() when FFmpeg releases the opaque buffer reference.
struct QVideoFrameHolder
{
    QVideoFrame f; // the mapped source frame
    QImage i;      // decoded image for JPEG frames; null otherwise
};
138 | |
139 | static void freeQVideoFrame(void *opaque, uint8_t *) |
140 | { |
141 | delete reinterpret_cast<QVideoFrameHolder *>(opaque); |
142 | } |
143 | |
// Encodes one queued video frame end-to-end:
//  - drains packets the codec has already produced,
//  - obtains an AVFrame for the queued QVideoFrame (reusing the native
//    hardware frame when its format matches the encoder's source format,
//    otherwise wrapping the mapped frame data without copying),
//  - rebases the frame's timestamp onto the recording timeline and sends
//    the frame to the frame encoder.
void VideoEncoder::processOne()
{
    Q_ASSERT(m_frameEncoder);

    retrievePackets();

    FrameInfo frameInfo = takeFrame();
    QVideoFrame &frame = frameInfo.frame;
    Q_ASSERT(frame.isValid());

    // qCDebug(qLcFFmpegEncoder) << "new video buffer" << frame.startTime();

    AVFrameUPtr avFrame;

    auto *videoBuffer = dynamic_cast<QFFmpegVideoBuffer *>(QVideoFramePrivate::hwBuffer(frame));
    if (videoBuffer) {
        // ffmpeg video buffer, let's use the native AVFrame stored in there
        auto *hwFrame = videoBuffer->getHWFrame();
        if (hwFrame && hwFrame->format == m_frameEncoder->sourceFormat())
            avFrame.reset(av_frame_clone(hwFrame));
    }

    if (!avFrame) {
        // No reusable native frame: wrap the mapped pixel planes in a fresh
        // AVFrame without copying the data.
        frame.map(QVideoFrame::ReadOnly);
        auto size = frame.size();
        avFrame = makeAVFrame();
        avFrame->format = m_frameEncoder->sourceFormat();
        avFrame->width = size.width();
        avFrame->height = size.height();

        for (int i = 0; i < 4; ++i) {
            avFrame->data[i] = const_cast<uint8_t *>(frame.bits(i));
            avFrame->linesize[i] = frame.bytesPerLine(i);
        }

        // TODO: investigate if we need to set color params to AVFrame.
        // Setting only codec parameters might be sufficient.
        // What happens if frame color params are set and not equal codec prms?
        //
        // QVideoFrameFormat format = frame.surfaceFormat();
        // avFrame->color_trc = QFFmpeg::toAvColorTransfer(format.colorTransfer());
        // avFrame->colorspace = QFFmpeg::toAvColorSpace(format.colorSpace());
        // avFrame->color_range = QFFmpeg::toAvColorRange(format.colorRange());

        QImage img;
        if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
            // the QImage is cached inside the video frame, so we can take the pointer to the image
            // data here
            img = frame.toImage();
            avFrame->data[0] = (uint8_t *)img.bits();
            avFrame->linesize[0] = img.bytesPerLine();
        }

        Q_ASSERT(avFrame->data[0]);
        // ensure the video frame and its data is alive as long as it's being used in the encoder
        avFrame->opaque_ref = av_buffer_create(nullptr, 0, freeQVideoFrame,
                                               new QVideoFrameHolder{ frame, img }, 0);
    }

    const auto [startTime, endTime] = frameTimeStamps(frame);

    // After a pause, shift the base time so the timeline continues
    // seamlessly from the last frame encoded before the pause.
    if (frameInfo.shouldAdjustTimeBase) {
        m_baseTime += startTime - m_lastFrameTime;
        qCDebug(qLcFFmpegVideoEncoder)
                << ">>>> adjusting base time to" << m_baseTime << startTime << m_lastFrameTime;
    }

    const qint64 time = startTime - m_baseTime;
    m_lastFrameTime = endTime;

    setAVFrameTime(*avFrame, m_frameEncoder->getPts(time), m_frameEncoder->getTimeBase());

    m_recordingEngine.newTimeStamp(time / 1000);

    qCDebug(qLcFFmpegVideoEncoder)
            << ">>> sending frame" << avFrame->pts << time << m_lastFrameTime;
    int ret = m_frameEncoder->sendFrame(std::move(avFrame));
    if (ret < 0) {
        qCDebug(qLcFFmpegVideoEncoder) << "error sending frame" << ret << err2str(ret);
        emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError, err2str(ret));
    }
}
226 | |
227 | bool VideoEncoder::checkIfCanPushFrame() const |
228 | { |
229 | if (m_encodingStarted) |
230 | return m_videoFrameQueue.size() < m_maxQueueSize; |
231 | if (!isFinished()) |
232 | return m_videoFrameQueue.empty(); |
233 | |
234 | return false; |
235 | } |
236 | |
237 | std::pair<qint64, qint64> VideoEncoder::frameTimeStamps(const QVideoFrame &frame) const |
238 | { |
239 | qint64 startTime = frame.startTime(); |
240 | qint64 endTime = frame.endTime(); |
241 | |
242 | if (startTime == -1) { |
243 | startTime = m_lastFrameTime; |
244 | endTime = -1; |
245 | } |
246 | |
247 | if (endTime == -1) { |
248 | qreal frameRate = frame.streamFrameRate(); |
249 | if (frameRate <= 0.) |
250 | frameRate = m_settings.videoFrameRate(); |
251 | |
252 | Q_ASSERT(frameRate > 0.f); |
253 | endTime = startTime + static_cast<qint64>(std::round(x: VideoFrameTimeBase / frameRate)); |
254 | } |
255 | |
256 | return { startTime, endTime }; |
257 | } |
258 | |
259 | } // namespace QFFmpeg |
260 | |
261 | QT_END_NAMESPACE |
262 | |