Qt
Internal/Contributor docs for the Qt SDK. Note: these are NOT the official API docs; those are found at https://doc.qt.io/.
qffmpegvideobuffer.cpp
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qffmpegvideobuffer_p.h"
#include "private/qvideotexturehelper_p.h"
#include "private/qmultimediautils_p.h"
#include "qffmpeghwaccel_p.h"
#include "qloggingcategory.h"

extern "C" {
#include <libavutil/pixdesc.h>
#include <libavutil/hdr_dynamic_metadata.h>
#include <libavutil/mastering_display_metadata.h>
}

QT_BEGIN_NAMESPACE

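// A negative linesize in FFmpeg indicates a bottom-up (vertically flipped)
// plane layout. Such frames are detected here and rewritten through
// sws_scale() in convertSWFrame() so that QVideoFrame always sees top-down data.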
static bool isFrameFlipped(const AVFrame& frame) {
    for (int i = 0; i < AV_NUM_DATA_POINTERS && frame.data[i]; ++i) {
        if (frame.linesize[i] < 0)
            return true;
    }

    return false;
}

static Q_LOGGING_CATEGORY(qLcFFmpegVideoBuffer, "qt.multimedia.ffmpeg.videobuffer");

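// The buffer takes ownership of the decoded AVFrame. Frames with an attached
// hw_frames_ctx stay on the GPU until map() or mapTextures() needs them;
// plain software frames are converted right away into a Qt-compatible layout.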
QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio)
    : QAbstractVideoBuffer(QVideoFrame::NoHandle),
      m_frame(frame.get()),
      m_size(qCalculateFrameSize({ frame->width, frame->height },
                                 { pixelAspectRatio.num, pixelAspectRatio.den }))
{
    if (frame->hw_frames_ctx) {
        m_hwFrame = std::move(frame);
        m_pixelFormat = toQtPixelFormat(QFFmpeg::HWAccel::format(m_hwFrame.get()));
        return;
    }

    m_swFrame = std::move(frame);
    m_pixelFormat = toQtPixelFormat(AVPixelFormat(m_swFrame->format));

    convertSWFrame();
}

QFFmpegVideoBuffer::~QFFmpegVideoBuffer() = default;

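// Converts the software frame in place whenever its pixel format, orientation
// or size does not match what m_pixelFormat and m_size describe.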
void QFFmpegVideoBuffer::convertSWFrame()
{
    Q_ASSERT(m_swFrame);

    const auto actualAVPixelFormat = AVPixelFormat(m_swFrame->format);
    const auto targetAVPixelFormat = toAVPixelFormat(m_pixelFormat);

    if (actualAVPixelFormat != targetAVPixelFormat || isFrameFlipped(*m_swFrame)
        || m_size != QSize(m_swFrame->width, m_swFrame->height)) {
        Q_ASSERT(toQtPixelFormat(targetAVPixelFormat) == m_pixelFormat);
        // convert the format into something we can handle
        SwsContext *c = sws_getContext(m_swFrame->width, m_swFrame->height, actualAVPixelFormat,
                                       m_size.width(), m_size.height(), targetAVPixelFormat,
                                       SWS_BICUBIC, nullptr, nullptr, nullptr);

        auto newFrame = QFFmpeg::makeAVFrame();
        newFrame->width = m_size.width();
        newFrame->height = m_size.height();
        newFrame->format = targetAVPixelFormat;
        av_frame_get_buffer(newFrame.get(), 0);

        sws_scale(c, m_swFrame->data, m_swFrame->linesize, 0, m_swFrame->height,
                  newFrame->data, newFrame->linesize);
        if (m_frame == m_swFrame.get())
            m_frame = newFrame.get();
        m_swFrame = std::move(newFrame);
        sws_freeContext(c);
    }
}

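// Installs the converter that mapTextures() will use to wrap the hardware
// frame into RHI textures.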
void QFFmpegVideoBuffer::setTextureConverter(const QFFmpeg::TextureConverter &converter)
{
    m_textureConverter = converter;
    m_textureConverter.init(m_hwFrame.get());
    m_type = converter.isNull() ? QVideoFrame::NoHandle : QVideoFrame::RhiTextureHandle;
}

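// Maps FFmpeg's AVColorSpace onto the coarser QVideoFrameFormat::ColorSpace
// enum; anything Qt cannot represent is reported as undefined.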
QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
{
    switch (m_frame->colorspace) {
    default:
    case AVCOL_SPC_UNSPECIFIED:
    case AVCOL_SPC_RESERVED:
    case AVCOL_SPC_FCC:
    case AVCOL_SPC_SMPTE240M:
    case AVCOL_SPC_YCGCO:
    case AVCOL_SPC_SMPTE2085:
    case AVCOL_SPC_CHROMA_DERIVED_NCL:
    case AVCOL_SPC_CHROMA_DERIVED_CL:
    case AVCOL_SPC_ICTCP: // BT.2100 ICtCp
        return QVideoFrameFormat::ColorSpace_Undefined;
    case AVCOL_SPC_RGB:
        return QVideoFrameFormat::ColorSpace_AdobeRgb;
    case AVCOL_SPC_BT709:
        return QVideoFrameFormat::ColorSpace_BT709;
    case AVCOL_SPC_BT470BG: // BT601
    case AVCOL_SPC_SMPTE170M: // Also BT601
        return QVideoFrameFormat::ColorSpace_BT601;
    case AVCOL_SPC_BT2020_NCL: // Non-constant luminance
    case AVCOL_SPC_BT2020_CL: // Constant luminance
        return QVideoFrameFormat::ColorSpace_BT2020;
    }
}

QVideoFrameFormat::ColorTransfer QFFmpegVideoBuffer::colorTransfer() const
{
    return QFFmpeg::fromAvColorTransfer(m_frame->color_trc);
}

QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
{
    switch (m_frame->color_range) {
    case AVCOL_RANGE_MPEG:
        return QVideoFrameFormat::ColorRange_Video;
    case AVCOL_RANGE_JPEG:
        return QVideoFrameFormat::ColorRange_Full;
    default:
        return QVideoFrameFormat::ColorRange_Unknown;
    }
}

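// Scans the frame's side data for HDR mastering-display metadata and derives
// the peak luminance in nits; returns -1 when no such metadata is attached.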
float QFFmpegVideoBuffer::maxNits()
{
    float maxNits = -1;
    for (int i = 0; i < m_frame->nb_side_data; ++i) {
        AVFrameSideData *sd = m_frame->side_data[i];
        // TODO: Longer term we might want to also support HDR10+ dynamic metadata
        if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
            auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
            auto maybeLum = QFFmpeg::mul(10'000., data->max_luminance);
            if (maybeLum)
                maxNits = float(maybeLum.value());
        }
    }
    return maxNits;
}

QVideoFrame::MapMode QFFmpegVideoBuffer::mapMode() const
{
    return m_mode;
}

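// Maps the frame's planes into CPU-accessible memory. Hardware frames are
// first downloaded with av_hwframe_transfer_data(); mapping with WriteOnly
// drops the hardware frame (and any textures) since the CPU copy may diverge.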
QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
{
    if (!m_swFrame) {
        Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
        m_swFrame = QFFmpeg::makeAVFrame();
        /* retrieve data from GPU to CPU */
        int ret = av_hwframe_transfer_data(m_swFrame.get(), m_hwFrame.get(), 0);
        if (ret < 0) {
            qWarning() << "Error transferring the data to system memory:" << ret;
            return {};
        }
        convertSWFrame();
    }

    m_mode = mode;

    MapData mapData;
    auto *desc = QVideoTextureHelper::textureDescription(pixelFormat());
    mapData.nPlanes = desc->nplanes;
    for (int i = 0; i < mapData.nPlanes; ++i) {
        Q_ASSERT(m_swFrame->linesize[i] >= 0);

        mapData.data[i] = m_swFrame->data[i];
        mapData.bytesPerLine[i] = m_swFrame->linesize[i];
        mapData.size[i] = mapData.bytesPerLine[i] * desc->heightForPlane(m_swFrame->height, i);
    }

    if ((mode & QVideoFrame::WriteOnly) != 0 && m_hwFrame) {
        m_type = QVideoFrame::NoHandle;
        m_hwFrame.reset();
        if (m_textures) {
            qCDebug(qLcFFmpegVideoBuffer)
                    << "Mapping of FFmpeg video buffer with write mode when "
                       "textures have been created. Visual artifacts might "
                       "happen if the frame is still in the rendering pipeline";
            m_textures.reset();
        }
    }

    return mapData;
}

void QFFmpegVideoBuffer::unmap()
{
    // Nothing to do here for SW buffers.
    // Set NotMapped mode to ensure map/unmap/mapMode consistency.
    m_mode = QVideoFrame::NotMapped;
}

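// Wraps the hardware frame into RHI textures via the installed texture
// converter. The texture set is cached in m_textures and exposed through
// textureHandle(); the override itself always returns an empty pointer.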
std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
{
    if (m_textures)
        return {};
    if (!m_hwFrame)
        return {};
    if (m_textureConverter.isNull()) {
        m_textures = nullptr;
        return {};
    }

    m_textures.reset(m_textureConverter.getTextures(m_hwFrame.get()));
    if (!m_textures) {
        static thread_local int lastFormat = 0;
        if (std::exchange(lastFormat, m_hwFrame->format) != m_hwFrame->format) // prevent logging spam
            qWarning() << "Failed to get textures for frame; format:" << m_hwFrame->format;
    }
    return {};
}

quint64 QFFmpegVideoBuffer::textureHandle(QRhi *rhi, int plane) const
{
    return m_textures ? m_textures->textureHandle(rhi, plane) : 0;
}

QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::pixelFormat() const
{
    return m_pixelFormat;
}

QSize QFFmpegVideoBuffer::size() const
{
    return m_size;
}

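// Maps an FFmpeg pixel format onto the closest QVideoFrameFormat equivalent.
// For formats without a direct match, *needsConversion is set and a fallback
// (RGBA8888, P016 or YUV420P, depending on the descriptor) is returned.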
QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
{
    if (needsConversion)
        *needsConversion = false;

    switch (avPixelFormat) {
    default:
        break;
    case AV_PIX_FMT_NONE:
        Q_ASSERT(!"Invalid avPixelFormat!");
        return QVideoFrameFormat::Format_Invalid;
    case AV_PIX_FMT_ARGB:
        return QVideoFrameFormat::Format_ARGB8888;
    case AV_PIX_FMT_0RGB:
        return QVideoFrameFormat::Format_XRGB8888;
    case AV_PIX_FMT_BGRA:
        return QVideoFrameFormat::Format_BGRA8888;
    case AV_PIX_FMT_BGR0:
        return QVideoFrameFormat::Format_BGRX8888;
    case AV_PIX_FMT_ABGR:
        return QVideoFrameFormat::Format_ABGR8888;
    case AV_PIX_FMT_0BGR:
        return QVideoFrameFormat::Format_XBGR8888;
    case AV_PIX_FMT_RGBA:
        return QVideoFrameFormat::Format_RGBA8888;
    case AV_PIX_FMT_RGB0:
        return QVideoFrameFormat::Format_RGBX8888;

    case AV_PIX_FMT_YUV422P:
        return QVideoFrameFormat::Format_YUV422P;
    case AV_PIX_FMT_YUV420P:
        return QVideoFrameFormat::Format_YUV420P;
    case AV_PIX_FMT_YUV420P10:
        return QVideoFrameFormat::Format_YUV420P10;
    case AV_PIX_FMT_UYVY422:
        return QVideoFrameFormat::Format_UYVY;
    case AV_PIX_FMT_YUYV422:
        return QVideoFrameFormat::Format_YUYV;
    case AV_PIX_FMT_NV12:
        return QVideoFrameFormat::Format_NV12;
    case AV_PIX_FMT_NV21:
        return QVideoFrameFormat::Format_NV21;
    case AV_PIX_FMT_GRAY8:
        return QVideoFrameFormat::Format_Y8;
    case AV_PIX_FMT_GRAY16:
        return QVideoFrameFormat::Format_Y16;

    case AV_PIX_FMT_P010:
        return QVideoFrameFormat::Format_P010;
    case AV_PIX_FMT_P016:
        return QVideoFrameFormat::Format_P016;
    case AV_PIX_FMT_MEDIACODEC:
        return QVideoFrameFormat::Format_SamplerExternalOES;
    }

    if (needsConversion)
        *needsConversion = true;

    const AVPixFmtDescriptor *descriptor = av_pix_fmt_desc_get(avPixelFormat);

    if (descriptor->flags & AV_PIX_FMT_FLAG_RGB)
        return QVideoFrameFormat::Format_RGBA8888;

    if (descriptor->comp[0].depth > 8)
        return QVideoFrameFormat::Format_P016;
    return QVideoFrameFormat::Format_YUV420P;
}

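// Inverse of toQtPixelFormat(): picks the AVPixelFormat that sws_scale should
// produce for a given Qt pixel format, or AV_PIX_FMT_NONE when there is none.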
AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
{
    switch (pixelFormat) {
    default:
    case QVideoFrameFormat::Format_Invalid:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
    case QVideoFrameFormat::Format_IMC2:
    case QVideoFrameFormat::Format_IMC3:
    case QVideoFrameFormat::Format_IMC4:
        return AV_PIX_FMT_NONE;
    case QVideoFrameFormat::Format_Jpeg:
        // We're using the data from the converted QImage here, which is in BGRA.
        return AV_PIX_FMT_BGRA;
    case QVideoFrameFormat::Format_ARGB8888:
        return AV_PIX_FMT_ARGB;
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        return AV_PIX_FMT_0RGB;
    case QVideoFrameFormat::Format_BGRA8888:
        return AV_PIX_FMT_BGRA;
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        return AV_PIX_FMT_BGR0;
    case QVideoFrameFormat::Format_ABGR8888:
        return AV_PIX_FMT_ABGR;
    case QVideoFrameFormat::Format_XBGR8888:
        return AV_PIX_FMT_0BGR;
    case QVideoFrameFormat::Format_RGBA8888:
        return AV_PIX_FMT_RGBA;
    // to be added in 6.8:
    // case QVideoFrameFormat::Format_RGBA8888_Premultiplied:
    case QVideoFrameFormat::Format_RGBX8888:
        return AV_PIX_FMT_RGB0;

    case QVideoFrameFormat::Format_YUV422P:
        return AV_PIX_FMT_YUV422P;
    case QVideoFrameFormat::Format_YUV420P:
        return AV_PIX_FMT_YUV420P;
    case QVideoFrameFormat::Format_YUV420P10:
        return AV_PIX_FMT_YUV420P10;
    case QVideoFrameFormat::Format_UYVY:
        return AV_PIX_FMT_UYVY422;
    case QVideoFrameFormat::Format_YUYV:
        return AV_PIX_FMT_YUYV422;
    case QVideoFrameFormat::Format_NV12:
        return AV_PIX_FMT_NV12;
    case QVideoFrameFormat::Format_NV21:
        return AV_PIX_FMT_NV21;
    case QVideoFrameFormat::Format_Y8:
        return AV_PIX_FMT_GRAY8;
    case QVideoFrameFormat::Format_Y16:
        return AV_PIX_FMT_GRAY16;

    case QVideoFrameFormat::Format_P010:
        return AV_PIX_FMT_P010;
    case QVideoFrameFormat::Format_P016:
        return AV_PIX_FMT_P016;

    case QVideoFrameFormat::Format_SamplerExternalOES:
        return AV_PIX_FMT_MEDIACODEC;
    }
}

QT_END_NAMESPACE
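A minimal usage sketch (not part of the file above), showing how the two static
format helpers can be combined to pick the software format that this buffer will
ultimately expose; the helper name negotiateSwFormat is illustrative only:

    #include "qffmpegvideobuffer_p.h"

    // Given a decoder's output format, report the AVPixelFormat that
    // QFFmpegVideoBuffer will hand on to the rest of the pipeline.
    static AVPixelFormat negotiateSwFormat(AVPixelFormat decoded)
    {
        bool needsConversion = false;
        const QVideoFrameFormat::PixelFormat qtFormat =
                QFFmpegVideoBuffer::toQtPixelFormat(decoded, &needsConversion);
        // When a conversion is needed, convertSWFrame() rescales into the
        // AVPixelFormat matching qtFormat, so report that format instead.
        return needsConversion ? QFFmpegVideoBuffer::toAVPixelFormat(qtFormat) : decoded;
    }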