// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegvideoencoder_p.h"
#include "qffmpegmuxer_p.h"
#include "qffmpegvideobuffer_p.h"
#include "qffmpegrecordingengine_p.h"
#include "qffmpegvideoframeencoder_p.h"
#include "qffmpegrecordingengineutils_p.h"
#include "private/qvideoframe_p.h"
#include "private/qmultimediautils_p.h"
#include <QtCore/qloggingcategory.h>

QT_BEGIN_NAMESPACE

using namespace Qt::StringLiterals;

namespace QFFmpeg {

static Q_LOGGING_CATEGORY(qLcFFmpegVideoEncoder, "qt.multimedia.ffmpeg.videoencoder");

VideoEncoder::VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoderSettings &settings,
                           const QVideoFrameFormat &format, std::optional<AVPixelFormat> hwFormat)
    : EncoderThread(recordingEngine), m_settings(settings)
{
    setObjectName(QLatin1String("VideoEncoder"));

    const AVPixelFormat swFormat = QFFmpegVideoBuffer::toAVPixelFormat(format.pixelFormat());
    qreal frameRate = format.streamFrameRate();
    if (frameRate <= 0.) {
        qWarning() << "Invalid frameRate" << frameRate << "; Using the default instead";

        // set some default frame rate since ffmpeg has UB if it's 0.
        frameRate = 30.;
    }

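    // m_sourceParams describes the incoming frames; it is passed to VideoFrameEncoder::create()
    // in init() to configure the encoder accordingly.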
    m_sourceParams.size = format.frameSize();
    m_sourceParams.format = hwFormat && *hwFormat != AV_PIX_FMT_NONE ? *hwFormat : swFormat;
    // Temporary: check isSwPixelFormat because of android issue (QTBUG-116836)
    // TODO: assign swFormat.
    m_sourceParams.swFormat =
            isSwPixelFormat(m_sourceParams.format) ? m_sourceParams.format : swFormat;
    m_sourceParams.transform = qNormalizedSurfaceTransformation(format);
    m_sourceParams.frameRate = frameRate;
    m_sourceParams.colorTransfer = QFFmpeg::toAvColorTransfer(format.colorTransfer());
    m_sourceParams.colorSpace = QFFmpeg::toAvColorSpace(format.colorSpace());
    m_sourceParams.colorRange = QFFmpeg::toAvColorRange(format.colorRange());

    if (!m_settings.videoResolution().isValid())
        m_settings.setVideoResolution(m_sourceParams.size);

    if (m_settings.videoFrameRate() <= 0.)
        m_settings.setVideoFrameRate(m_sourceParams.frameRate);
}

VideoEncoder::~VideoEncoder() = default;

void VideoEncoder::addFrame(const QVideoFrame &frame)
{
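    // An invalid frame is the signal that the source stream has ended.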
    if (!frame.isValid()) {
        setEndOfSourceStream();
        return;
    }

    {
        auto guard = lockLoopData();

        resetEndOfSourceStream();

        if (m_paused) {
            m_shouldAdjustTimeBaseForNextFrame = true;
            return;
        }

        // Drop frames if the encoder cannot keep up with the video source data rate;
        // canPushFrame might be used instead
        const bool queueFull = m_videoFrameQueue.size() >= m_maxQueueSize;

        if (queueFull) {
            qCDebug(qLcFFmpegVideoEncoder) << "RecordingEngine frame queue full. Frame lost.";
            return;
        }

        m_videoFrameQueue.push({ frame, m_shouldAdjustTimeBaseForNextFrame });
        m_shouldAdjustTimeBaseForNextFrame = false;
    }

    dataReady();
}

VideoEncoder::FrameInfo VideoEncoder::takeFrame()
{
    auto guard = lockLoopData();
    return dequeueIfPossible(m_videoFrameQueue);
}

void VideoEncoder::retrievePackets()
{
    Q_ASSERT(m_frameEncoder);
    while (auto packet = m_frameEncoder->retrievePacket())
        m_recordingEngine.getMuxer()->addPacket(std::move(packet));
}

bool VideoEncoder::init()
{
    m_frameEncoder = VideoFrameEncoder::create(m_settings, m_sourceParams,
                                               m_recordingEngine.avFormatContext());

    qCDebug(qLcFFmpegVideoEncoder) << "VideoEncoder::init started video device thread.";
    if (!m_frameEncoder) {
        emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError,
                                            u"Could not initialize encoder"_s);
        return false;
    }

    return EncoderThread::init();
}

void VideoEncoder::cleanup()
{
    Q_ASSERT(m_frameEncoder);

    while (!m_videoFrameQueue.empty())
        processOne();

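    // Flush the encoder: a null frame signals end of stream. While the send is rejected
    // with EAGAIN, drain pending packets and retry, then fetch the remaining ones.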
    while (m_frameEncoder->sendFrame(nullptr) == AVERROR(EAGAIN))
        retrievePackets();
    retrievePackets();
}

bool VideoEncoder::hasData() const
{
    return !m_videoFrameQueue.empty();
}

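// Keeps the QVideoFrame (and, for JPEG frames, the decoded QImage) alive while the encoder
// still references the mapped data; released by freeQVideoFrame() below.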
struct QVideoFrameHolder
{
    QVideoFrame f;
    QImage i;
};

static void freeQVideoFrame(void *opaque, uint8_t *)
{
    delete reinterpret_cast<QVideoFrameHolder *>(opaque);
}

void VideoEncoder::processOne()
{
    Q_ASSERT(m_frameEncoder);

    retrievePackets();

    FrameInfo frameInfo = takeFrame();
    QVideoFrame &frame = frameInfo.frame;
    Q_ASSERT(frame.isValid());

    // qCDebug(qLcFFmpegEncoder) << "new video buffer" << frame.startTime();

    AVFrameUPtr avFrame;

    auto *videoBuffer = dynamic_cast<QFFmpegVideoBuffer *>(QVideoFramePrivate::hwBuffer(frame));
    if (videoBuffer) {
        // ffmpeg video buffer, let's use the native AVFrame stored in there
        auto *hwFrame = videoBuffer->getHWFrame();
        if (hwFrame && hwFrame->format == m_frameEncoder->sourceFormat())
            avFrame.reset(av_frame_clone(hwFrame));
    }

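    // No suitable native FFmpeg frame: wrap the mapped QVideoFrame data in an AVFrame
    // without copying the pixel data.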
    if (!avFrame) {
        frame.map(QVideoFrame::ReadOnly);
        auto size = frame.size();
        avFrame = makeAVFrame();
        avFrame->format = m_frameEncoder->sourceFormat();
        avFrame->width = size.width();
        avFrame->height = size.height();

        for (int i = 0; i < 4; ++i) {
            avFrame->data[i] = const_cast<uint8_t *>(frame.bits(i));
            avFrame->linesize[i] = frame.bytesPerLine(i);
        }

        // TODO: investigate if we need to set color params to AVFrame.
        // Setting only codec parameters might be sufficient.
        // What happens if frame color params are set and not equal to the codec params?
        //
        // QVideoFrameFormat format = frame.surfaceFormat();
        // avFrame->color_trc = QFFmpeg::toAvColorTransfer(format.colorTransfer());
        // avFrame->colorspace = QFFmpeg::toAvColorSpace(format.colorSpace());
        // avFrame->color_range = QFFmpeg::toAvColorRange(format.colorRange());

        QImage img;
        if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
            // the QImage is cached inside the video frame, so we can take the pointer to the image
            // data here
            img = frame.toImage();
            avFrame->data[0] = (uint8_t *)img.bits();
            avFrame->linesize[0] = img.bytesPerLine();
        }

        Q_ASSERT(avFrame->data[0]);
        // ensure the video frame and its data stay alive as long as they are used by the encoder
        avFrame->opaque_ref = av_buffer_create(nullptr, 0, freeQVideoFrame,
                                               new QVideoFrameHolder{ frame, img }, 0);
    }

    const auto [startTime, endTime] = frameTimeStamps(frame);

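    // Adjust the base time after a pause so that the gap does not appear in the
    // output timestamps.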
    if (frameInfo.shouldAdjustTimeBase) {
        m_baseTime += startTime - m_lastFrameTime;
        qCDebug(qLcFFmpegVideoEncoder)
                << ">>>> adjusting base time to" << m_baseTime << startTime << m_lastFrameTime;
    }

    const qint64 time = startTime - m_baseTime;
    m_lastFrameTime = endTime;

    setAVFrameTime(*avFrame, m_frameEncoder->getPts(time), m_frameEncoder->getTimeBase());

    m_recordingEngine.newTimeStamp(time / 1000);

    qCDebug(qLcFFmpegVideoEncoder)
            << ">>> sending frame" << avFrame->pts << time << m_lastFrameTime;
    int ret = m_frameEncoder->sendFrame(std::move(avFrame));
    if (ret < 0) {
        qCDebug(qLcFFmpegVideoEncoder) << "error sending frame" << ret << err2str(ret);
        emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError, err2str(ret));
    }
}

bool VideoEncoder::checkIfCanPushFrame() const
{
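    // Before encoding has started, accept at most one queued frame; once it is running,
    // allow the queue to fill up to m_maxQueueSize. Reject frames after the thread finished.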
    if (m_encodingStarted)
        return m_videoFrameQueue.size() < m_maxQueueSize;
    if (!isFinished())
        return m_videoFrameQueue.empty();

    return false;
}

std::pair<qint64, qint64> VideoEncoder::frameTimeStamps(const QVideoFrame &frame) const
{
    qint64 startTime = frame.startTime();
    qint64 endTime = frame.endTime();

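    // A frame without a start time is assumed to continue right after the previous one.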
    if (startTime == -1) {
        startTime = m_lastFrameTime;
        endTime = -1;
    }

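    // If the end time is unknown, derive the frame duration from the frame rate
    // (VideoFrameTimeBase / frameRate), falling back to the configured encoder frame rate.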
    if (endTime == -1) {
        qreal frameRate = frame.streamFrameRate();
        if (frameRate <= 0.)
            frameRate = m_settings.videoFrameRate();

        Q_ASSERT(frameRate > 0.f);
        endTime = startTime + static_cast<qint64>(std::round(VideoFrameTimeBase / frameRate));
    }

    return { startTime, endTime };
}

} // namespace QFFmpeg

QT_END_NAMESPACE