// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qvideowindow_p.h"
#include <QPlatformSurfaceEvent>
#include <qfile.h>
#include <qpainter.h>
#include <private/qguiapplication_p.h>
#include <private/qmemoryvideobuffer_p.h>
#include <private/qhwvideobuffer_p.h>
#include <private/qmultimediautils_p.h>
#include <private/qvideoframe_p.h>
#include <qpa/qplatformintegration.h>

QT_BEGIN_NAMESPACE

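// Pick the window surface type that best matches the platform's preferred
// rendering backend, falling back to a raster surface when OpenGL is not
// available or raster widgets are forced.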
static QSurface::SurfaceType platformSurfaceType()
{
#if defined(Q_OS_DARWIN)
    return QSurface::MetalSurface;
#elif defined(Q_OS_WIN)
    return QSurface::Direct3DSurface;
#endif

    auto *integration = QGuiApplicationPrivate::platformIntegration();

    if (!integration->hasCapability(QPlatformIntegration::OpenGL))
        return QSurface::RasterSurface;

    if (QCoreApplication::testAttribute(Qt::AA_ForceRasterWidgets))
        return QSurface::RasterSurface;

    if (integration->hasCapability(QPlatformIntegration::RasterGLSurface))
        return QSurface::RasterGLSurface;

    return QSurface::OpenGLSurface;
}

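// The constructor queries the platform for RHI-based rendering support and
// maps the chosen surface type to the matching QRhi backend; QRhi::Null marks
// the software (QBackingStore) path. Frames arriving at the sink are forwarded
// to QVideoWindow::setVideoFrame().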
QVideoWindowPrivate::QVideoWindowPrivate(QVideoWindow *q)
    : q(q),
    m_sink(new QVideoSink)
{
    Q_ASSERT(q);

    if (QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::RhiBasedRendering)) {
        auto surfaceType = ::platformSurfaceType();
        q->setSurfaceType(surfaceType);
        switch (surfaceType) {
        case QSurface::RasterSurface:
        case QSurface::OpenVGSurface:
            // Can't render with the RHI on these surfaces; fall back to software rendering.
            m_graphicsApi = QRhi::Null;
            break;
        case QSurface::OpenGLSurface:
        case QSurface::RasterGLSurface:
            m_graphicsApi = QRhi::OpenGLES2;
            break;
        case QSurface::VulkanSurface:
            m_graphicsApi = QRhi::Vulkan;
            break;
        case QSurface::MetalSurface:
            m_graphicsApi = QRhi::Metal;
            break;
        case QSurface::Direct3DSurface:
            m_graphicsApi = QRhi::D3D11;
            break;
        }
    }

    QObject::connect(m_sink.get(), &QVideoSink::videoFrameChanged, q, &QVideoWindow::setVideoFrame);
}

QVideoWindowPrivate::~QVideoWindowPrivate()
{
    QObject::disconnect(m_sink.get(), &QVideoSink::videoFrameChanged,
            q, &QVideoWindow::setVideoFrame);
}

static const float g_vw_quad[] = {
    // Four variants of the quad, one per 90° clockwise rotation of the texture
    // coordinates (the second pair of floats in each vertex).
    // Rotation 0
    -1.f, -1.f,   0.f, 0.f,
    -1.f, 1.f,    0.f, 1.f,
    1.f, -1.f,    1.f, 0.f,
    1.f, 1.f,     1.f, 1.f,
    // Rotation 90
    -1.f, -1.f,   0.f, 1.f,
    -1.f, 1.f,    1.f, 1.f,
    1.f, -1.f,    0.f, 0.f,
    1.f, 1.f,     1.f, 0.f,
    // Rotation 180
    -1.f, -1.f,   1.f, 1.f,
    -1.f, 1.f,    1.f, 0.f,
    1.f, -1.f,    0.f, 1.f,
    1.f, 1.f,     0.f, 0.f,
    // Rotation 270
    -1.f, -1.f,   1.f, 0.f,
    -1.f, 1.f,    0.f, 0.f,
    1.f, -1.f,    1.f, 1.f,
    1.f, 1.f,     0.f, 1.f
};

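// Loads a pre-compiled, serialized shader (.qsb) from the given path.
// Returns an invalid QShader if the file cannot be opened.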
static QShader vwGetShader(const QString &name)
{
    QFile f(name);
    if (f.open(QIODevice::ReadOnly))
        return QShader::fromSerialized(f.readAll());

    return QShader();
}

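// Creates the QRhi instance for the selected backend together with the
// swapchain, render pass descriptor, vertex/uniform buffers, sampler and
// shader resource bindings used for video and subtitle rendering.
// Does nothing when software rendering (QRhi::Null) was selected.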
void QVideoWindowPrivate::initRhi()
{
    if (m_graphicsApi == QRhi::Null)
        return;

    QRhi::Flags rhiFlags = {}; // QRhi::EnableDebugMarkers | QRhi::EnableProfiling;

#if QT_CONFIG(opengl)
    if (m_graphicsApi == QRhi::OpenGLES2) {
        m_fallbackSurface.reset(QRhiGles2InitParams::newFallbackSurface(q->format()));
        QRhiGles2InitParams params;
        params.fallbackSurface = m_fallbackSurface.get();
        params.window = q;
        params.format = q->format();
        m_rhi.reset(QRhi::create(QRhi::OpenGLES2, &params, rhiFlags));
    }
#endif

#if QT_CONFIG(vulkan)
    if (m_graphicsApi == QRhi::Vulkan) {
        QRhiVulkanInitParams params;
        params.inst = q->vulkanInstance();
        params.window = q;
        m_rhi.reset(QRhi::create(QRhi::Vulkan, &params, rhiFlags));
    }
#endif

#ifdef Q_OS_WIN
    if (m_graphicsApi == QRhi::D3D11) {
        QRhiD3D11InitParams params;
        params.enableDebugLayer = true;
        m_rhi.reset(QRhi::create(QRhi::D3D11, &params, rhiFlags));
    }
#endif

#if defined(Q_OS_MACOS) || defined(Q_OS_IOS)
    if (m_graphicsApi == QRhi::Metal) {
        QRhiMetalInitParams params;
        m_rhi.reset(QRhi::create(QRhi::Metal, &params, rhiFlags));
    }
#endif
    if (!m_rhi)
        return;

    m_swapChain.reset(m_rhi->newSwapChain());
    m_swapChain->setWindow(q);
    m_renderPass.reset(m_swapChain->newCompatibleRenderPassDescriptor());
    m_swapChain->setRenderPassDescriptor(m_renderPass.get());

    m_vertexBuf.reset(m_rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_vw_quad)));
    m_vertexBuf->create();
    m_vertexBufReady = false;

    m_uniformBuf.reset(m_rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, sizeof(QVideoTextureHelper::UniformData)));
    m_uniformBuf->create();

    m_textureSampler.reset(m_rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
                                             QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
    m_textureSampler->create();

    m_shaderResourceBindings.reset(m_rhi->newShaderResourceBindings());
    m_subtitleResourceBindings.reset(m_rhi->newShaderResourceBindings());

    m_subtitleUniformBuf.reset(m_rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, sizeof(QVideoTextureHelper::UniformData)));
    m_subtitleUniformBuf->create();
}

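// Configures a graphics pipeline for drawing a textured quad (triangle strip
// with interleaved 2D position + 2D texture coordinates), using the vertex and
// fragment shaders that match the given video frame format.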
void QVideoWindowPrivate::setupGraphicsPipeline(QRhiGraphicsPipeline *pipeline, QRhiShaderResourceBindings *bindings, const QVideoFrameFormat &fmt)
{
    pipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);
    QShader vs = vwGetShader(QVideoTextureHelper::vertexShaderFileName(fmt));
    Q_ASSERT(vs.isValid());
    QShader fs = vwGetShader(QVideoTextureHelper::fragmentShaderFileName(
            fmt, m_rhi.get(), m_swapChain->format()));
    Q_ASSERT(fs.isValid());
    pipeline->setShaderStages({
        { QRhiShaderStage::Vertex, vs },
        { QRhiShaderStage::Fragment, fs }
    });
    QRhiVertexInputLayout inputLayout;
    inputLayout.setBindings({
        { 4 * sizeof(float) }
    });
    inputLayout.setAttributes({
        { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
        { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
    });
    pipeline->setVertexInputLayout(inputLayout);
    pipeline->setShaderResourceBindings(bindings);
    pipeline->setRenderPassDescriptor(m_renderPass.get());
    pipeline->create();
}

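// Uploads the current frame's planes into RHI textures (via the texture pool)
// and rebuilds the shader resource bindings. The graphics pipeline is
// (re)created whenever the frame format changes.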
void QVideoWindowPrivate::updateTextures(QRhiResourceUpdateBatch *rub)
{
    // We render a 1x1 black pixel when we don't have a video
    if (!m_texturePool.currentFrame().isValid())
        m_texturePool.setCurrentFrame(QVideoFramePrivate::createFrame(
                std::make_unique<QMemoryVideoBuffer>(QByteArray{ 4, 0 }, 4),
                QVideoFrameFormat(QSize(1, 1), QVideoFrameFormat::Format_RGBA8888)));

    if (!m_texturePool.texturesDirty())
        return;

    QVideoFrameTextures *textures = m_texturePool.updateTextures(*m_rhi, *rub);
    if (!textures)
        return;

    QRhiShaderResourceBinding bindings[4];
    auto *b = bindings;
    *(b++) = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                   m_uniformBuf.get());

    auto fmt = m_texturePool.currentFrame().surfaceFormat();
    auto textureDesc = QVideoTextureHelper::textureDescription(fmt.pixelFormat());

    for (int i = 0; i < textureDesc->nplanes; ++i)
        (*b++) = QRhiShaderResourceBinding::sampledTexture(
                i + 1, QRhiShaderResourceBinding::FragmentStage, textures->texture(i),
                m_textureSampler.get());
    m_shaderResourceBindings->setBindings(bindings, b);
    m_shaderResourceBindings->create();

    if (fmt != format) {
        format = fmt;
        if (!m_graphicsPipeline)
            m_graphicsPipeline.reset(m_rhi->newGraphicsPipeline());

        setupGraphicsPipeline(m_graphicsPipeline.get(), m_shaderResourceBindings.get(), format);
    }
}

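// Renders the current frame's subtitle text into an RGBA texture and sets up
// the blended pipeline and uniform buffer used to draw it on top of the video.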
void QVideoWindowPrivate::updateSubtitle(QRhiResourceUpdateBatch *rub, const QSize &frameSize)
{
    m_subtitleDirty = false;
    m_hasSubtitle = !m_texturePool.currentFrame().subtitleText().isEmpty();
    if (!m_hasSubtitle)
        return;

    m_subtitleLayout.update(frameSize, m_texturePool.currentFrame().subtitleText());
    QSize size = m_subtitleLayout.bounds.size().toSize();

    QImage img = m_subtitleLayout.toImage();

    m_subtitleTexture.reset(m_rhi->newTexture(QRhiTexture::RGBA8, size));
    m_subtitleTexture->create();
    rub->uploadTexture(m_subtitleTexture.get(), img);

    QRhiShaderResourceBinding bindings[2];

    bindings[0] = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                           m_subtitleUniformBuf.get());

    bindings[1] = QRhiShaderResourceBinding::sampledTexture(1, QRhiShaderResourceBinding::FragmentStage,
                                                            m_subtitleTexture.get(), m_textureSampler.get());
    m_subtitleResourceBindings->setBindings(bindings, bindings + 2);
    m_subtitleResourceBindings->create();

    if (!m_subtitlePipeline) {
        m_subtitlePipeline.reset(m_rhi->newGraphicsPipeline());

        QRhiGraphicsPipeline::TargetBlend blend;
        blend.enable = true;
        m_subtitlePipeline->setTargetBlends({ blend });
        setupGraphicsPipeline(m_subtitlePipeline.get(), m_subtitleResourceBindings.get(), QVideoFrameFormat(QSize(1, 1), QVideoFrameFormat::Format_RGBA8888));
    }
}

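// Lazily initializes the rendering path: RHI when available, otherwise a
// QBackingStore for software painting.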
void QVideoWindowPrivate::init()
{
    if (initialized)
        return;
    initialized = true;

    initRhi();

    if (!m_rhi)
        backingStore = new QBackingStore(q);
    else
        m_sink->setRhi(m_rhi.get());
}

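// Swapchain housekeeping; releaseSwapChain() is called before the native
// surface goes away (see the PlatformSurface handling in QVideoWindow::event()).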
void QVideoWindowPrivate::resizeSwapChain()
{
    m_hasSwapChain = m_swapChain->createOrResize();
}

void QVideoWindowPrivate::releaseSwapChain()
{
    if (m_hasSwapChain) {
        m_hasSwapChain = false;
        m_swapChain->destroy();
    }
}

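// Renders one frame. Without an RHI this paints the frame into the backing
// store; otherwise it (re)sizes the swapchain as needed, uploads textures and
// uniform data, and records a render pass drawing the video quad and, if
// present, the subtitle quad.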
void QVideoWindowPrivate::render()
{
    if (!initialized)
        init();

    if (!q->isExposed() || !isExposed)
        return;

    QRect rect(0, 0, q->width(), q->height());

    if (backingStore) {
        if (backingStore->size() != q->size())
            backingStore->resize(q->size());

        backingStore->beginPaint(rect);

        QPaintDevice *device = backingStore->paintDevice();
        if (!device)
            return;
        QPainter painter(device);

        QVideoFrame frame = m_texturePool.currentFrame();
        frame.paint(&painter, rect, { Qt::black, aspectRatioMode });
        painter.end();

        backingStore->endPaint();
        backingStore->flush(rect);
        return;
    }

    const VideoTransformation frameTransformation =
            qNormalizedFrameTransformation(m_texturePool.currentFrame().surfaceFormat());
    const QSize frameSize = qRotatedFramePresentationSize(m_texturePool.currentFrame());
    const QSize scaled = frameSize.scaled(rect.size(), aspectRatioMode);
    QRect videoRect = QRect(QPoint(0, 0), scaled);
    videoRect.moveCenter(rect.center());
    QRect subtitleRect = videoRect.intersected(rect);

    if (!m_hasSwapChain || (m_swapChain->currentPixelSize() != m_swapChain->surfacePixelSize()))
        resizeSwapChain();

    const auto requiredSwapChainFormat =
            qGetRequiredSwapChainFormat(m_texturePool.currentFrame().surfaceFormat());
    if (qShouldUpdateSwapChainFormat(m_swapChain.get(), requiredSwapChainFormat)) {
        releaseSwapChain();
        m_swapChain->setFormat(requiredSwapChainFormat);
        resizeSwapChain();
    }

    if (!m_hasSwapChain)
        return;

    QRhi::FrameOpResult r = m_rhi->beginFrame(m_swapChain.get());

    if (r == QRhi::FrameOpSwapChainOutOfDate) {
        resizeSwapChain();
        if (!m_hasSwapChain)
            return;
        r = m_rhi->beginFrame(m_swapChain.get());
    }
    if (r != QRhi::FrameOpSuccess) {
        qWarning("beginFrame failed with %d, retry", r);
        q->requestUpdate();
        return;
    }

    QRhiResourceUpdateBatch *rub = m_rhi->nextResourceUpdateBatch();

    if (!m_vertexBufReady) {
        m_vertexBufReady = true;
        rub->uploadStaticBuffer(m_vertexBuf.get(), g_vw_quad);
    }

    updateTextures(rub);

    if (m_subtitleDirty || m_subtitleLayout.videoSize != subtitleRect.size())
        updateSubtitle(rub, subtitleRect.size());

    const float mirrorFrame = frameTransformation.mirrorredHorizontallyAfterRotation ? -1.f : 1.f;
    const float xscale = mirrorFrame * float(videoRect.width()) / float(rect.width());
    const float yscale = -1.f * float(videoRect.height()) / float(rect.height());

    QMatrix4x4 transform;
    transform.scale(xscale, yscale);

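    // Determine the display's peak brightness for HDR swapchains; the value is
    // passed to updateUniformData() below. SDR content is assumed to target
    // 100 nits.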
    float maxNits = 100;
    if (m_swapChain->format() == QRhiSwapChain::HDRExtendedSrgbLinear) {
        auto info = m_swapChain->hdrInfo();
        if (info.limitsType == QRhiSwapChainHdrInfo::ColorComponentValue)
            maxNits = 100 * info.limits.colorComponentValue.maxColorComponentValue;
        else
            maxNits = info.limits.luminanceInNits.maxLuminance;
    }

    QByteArray uniformData;
    QVideoTextureHelper::updateUniformData(&uniformData, m_rhi.get(),
                                           m_texturePool.currentFrame().surfaceFormat(),
                                           m_texturePool.currentFrame(), transform, 1.f, maxNits);
    rub->updateDynamicBuffer(m_uniformBuf.get(), 0, uniformData.size(), uniformData.constData());

    if (m_hasSubtitle) {
        QMatrix4x4 st;
        st.translate(0, -2.f * (float(m_subtitleLayout.bounds.center().y()) + float(subtitleRect.top())) / float(rect.height()) + 1.f);
        st.scale(float(m_subtitleLayout.bounds.width()) / float(rect.width()),
                -1.f * float(m_subtitleLayout.bounds.height()) / float(rect.height()));

        QByteArray uniformData;
        QVideoFrameFormat fmt(m_subtitleLayout.bounds.size().toSize(), QVideoFrameFormat::Format_ARGB8888);
        QVideoTextureHelper::updateUniformData(&uniformData, m_rhi.get(), fmt, QVideoFrame(), st,
                                               1.f);
        rub->updateDynamicBuffer(m_subtitleUniformBuf.get(), 0, uniformData.size(), uniformData.constData());
    }

    QRhiCommandBuffer *cb = m_swapChain->currentFrameCommandBuffer();
    cb->beginPass(m_swapChain->currentFrameRenderTarget(), Qt::black, { 1.0f, 0 }, rub);
    cb->setGraphicsPipeline(m_graphicsPipeline.get());
    auto size = m_swapChain->currentPixelSize();
    cb->setViewport({ 0, 0, float(size.width()), float(size.height()) });
    cb->setShaderResources(m_shaderResourceBindings.get());

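    // Select the pre-rotated quad variant from g_vw_quad that matches the
    // frame's rotation; each variant is 4 vertices of 4 floats.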
    const quint32 vertexOffset = quint32(sizeof(float)) * 16 * frameTransformation.rotationIndex();
    const QRhiCommandBuffer::VertexInput vbufBinding(m_vertexBuf.get(), vertexOffset);
    cb->setVertexInput(0, 1, &vbufBinding);
    cb->draw(4);

    if (m_hasSubtitle) {
        cb->setGraphicsPipeline(m_subtitlePipeline.get());
        cb->setShaderResources(m_subtitleResourceBindings.get());
        const QRhiCommandBuffer::VertexInput vbufBinding(m_vertexBuf.get(), 0);
        cb->setVertexInput(0, 1, &vbufBinding);
        cb->draw(4);
    }

    cb->endPass();

    m_rhi->endFrame(m_swapChain.get());

    m_texturePool.onFrameEndInvoked();
}

/*!
    \class QVideoWindow
    \internal
*/
QVideoWindow::QVideoWindow(QScreen *screen)
    : QWindow(screen)
    , d(new QVideoWindowPrivate(this))
{
}

QVideoWindow::QVideoWindow(QWindow *parent)
    : QWindow(parent)
    , d(new QVideoWindowPrivate(this))
{
}

QVideoWindow::~QVideoWindow() = default;

QVideoSink *QVideoWindow::videoSink() const
{
    return d->m_sink.get();
}

Qt::AspectRatioMode QVideoWindow::aspectRatioMode() const
{
    return d->aspectRatioMode;
}

void QVideoWindow::setAspectRatioMode(Qt::AspectRatioMode mode)
{
    if (d->aspectRatioMode == mode)
        return;
    d->aspectRatioMode = mode;
    emit aspectRatioModeChanged(mode);
}

bool QVideoWindow::event(QEvent *e)
{
    switch (e->type()) {
    case QEvent::UpdateRequest:
        d->render();
        return true;

    case QEvent::PlatformSurface:
        // This is the proper time to tear down the swapchain, while the native
        // window and surface are still around.
        if (static_cast<QPlatformSurfaceEvent *>(e)->surfaceEventType() == QPlatformSurfaceEvent::SurfaceAboutToBeDestroyed) {
            d->releaseSwapChain();
            d->isExposed = false;
        }
        break;
    case QEvent::Expose:
        d->isExposed = isExposed();
        if (d->isExposed)
            d->render();
        return true;

    default:
        break;
    }

    return QWindow::event(e);
}

void QVideoWindow::resizeEvent(QResizeEvent *resizeEvent)
{
    if (!d->backingStore)
        return;
    if (!d->initialized)
        d->init();
    d->backingStore->resize(resizeEvent->size());
}

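// Receives frames from the QVideoSink; marks the subtitle as dirty when the
// subtitle text changes and schedules an update while the window is exposed.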
void QVideoWindow::setVideoFrame(const QVideoFrame &frame)
{
    if (d->m_texturePool.currentFrame().subtitleText() != frame.subtitleText())
        d->m_subtitleDirty = true;
    d->m_texturePool.setCurrentFrame(frame);
    if (d->isExposed)
        requestUpdate();
}

QT_END_NAMESPACE

#include "moc_qvideowindow_p.cpp"