Qt
Internal/Contributor docs for the Qt SDK. <b>Note:</b> These are NOT official API docs; those are found <a href='https://doc.qt.io/'>here</a>.
Loading...
Searching...
No Matches
avfcamerarenderer.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "private/qabstractvideobuffer_p.h"
5#include "private/qcameradevice_p.h"
9#include "avfcameradebug_p.h"
10#include "avfcamera_p.h"
11#include <avfvideosink_p.h>
12#include <avfvideobuffer_p.h>
13#include "qvideosink.h"
14#include "qavfhelpers_p.h"
15
16#include <rhi/qrhi.h>
17
18#import <AVFoundation/AVFoundation.h>
19
20#ifdef Q_OS_IOS
21#include <QtGui/qopengl.h>
22#endif
23
24#include <private/qabstractvideobuffer_p.h>
25
26#include <QtMultimedia/qvideoframeformat.h>
27
29
// Private Objective-C delegate attached to the AVCaptureVideoDataOutput.
// It receives each captured sample buffer on the capture dispatch queue and
// forwards it to the C++ AVFCameraRenderer.
30@interface AVFCaptureFramesDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
31
// Initializer: remembers the renderer that captured frames are forwarded to.
32- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer;
33
// AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked once per
// captured video frame.
34- (void) captureOutput:(AVCaptureOutput *)captureOutput
35 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
36 fromConnection:(AVCaptureConnection *)connection;
37
38@end
39
40@implementation AVFCaptureFramesDelegate
41{
42@private
    // Raw back-pointer to the renderer; ownership stays with the renderer's
    // surrounding code (the delegate does not retain it).
43 AVFCameraRenderer *m_renderer;
44}
45
// Stores the renderer the delegate will forward frames to.
46- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer
47{
48 if (!(self = [super init]))
49 return nil;
50
51 self->m_renderer = renderer;
52 return self;
53}
54
// Per-frame callback from AVFoundation; runs on the delegate dispatch queue,
// i.e. NOT on the main/Qt thread.
55- (void)captureOutput:(AVCaptureOutput *)captureOutput
56 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
57 fromConnection:(AVCaptureConnection *)connection
58{
60 Q_UNUSED(captureOutput);
61
62 // NB: on iOS captureOutput/connection can be nil (when recording a video -
63 // avfmediaassetwriter).
64
    // Wrap the CoreVideo image buffer in Qt's AVFVideoBuffer abstraction.
65 CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
66 AVFVideoBuffer *buffer = new AVFVideoBuffer(m_renderer, imageBuffer);
67 auto format = buffer->videoFormat();
    // Drop frames whose pixel format cannot be mapped to a valid
    // QVideoFrameFormat.
68 if (!format.isValid()) {
69 delete buffer;
70 return;
71 }
72
    // Hand the frame to the renderer, which stashes it under a mutex and
    // queues delivery on the renderer's own thread.
74 m_renderer->syncHandleViewfinderFrame(frame);
75}
76
77@end
78
// AVFCameraRenderer constructor: creates the Objective-C frames delegate
// that feeds captured sample buffers back into this renderer.
80 : QObject(parent)
81{
82 m_viewfinderFramesDelegate = [[AVFCaptureFramesDelegate alloc] initWithRenderer:this];
85}
86
// Destructor: detaches the data output from the capture session, releases the
// Objective-C output object, the delegate dispatch queue and (on iOS) the
// CoreVideo texture cache.
88{
89 [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
91 [m_videoDataOutput release];
92
93 if (m_delegateQueue)
94 dispatch_release(m_delegateQueue);
95#ifdef Q_OS_IOS
96 if (m_textureCache)
97 CFRelease(m_textureCache);
98#endif
99}
100
// reconfigure(): pushes the preview layer's current bounds to the sink as the
// native frame size. Serialized against frame delivery via m_vfMutex.
102{
103 QMutexLocker lock(&m_vfMutex);
104
105 // ### This is a hack, need to use a reliable way to determine the size and not use the preview layer
106 if (m_layer)
107 m_sink->setNativeSize(QSize(m_layer.bounds.size.width, m_layer.bounds.size.height));
110}
111
// setOutputSettings(): derives the data output's videoSettings from the
// session's active camera format, then applies m_outputSettings to the output.
113{
114 if (!m_videoDataOutput)
115 return;
116
117 if (m_cameraSession) {
        // Only act on a concrete (non-invalid) pixel format from the camera.
118 const auto format = m_cameraSession->cameraFormat();
119 if (format.pixelFormat() != QVideoFrameFormat::Format_Invalid)
121 }
122
123 // If no output settings set from above,
124 // it's most likely because the rhi is OpenGL
125 // and the pixel format is not BGRA.
126 // We force this in the base class implementation
127 if (!m_outputSettings)
129
    // Apply whatever settings dictionary was produced above to the output.
131 m_videoDataOutput.videoSettings = m_outputSettings;
132}
133
// configureAVCaptureSession(): wires this renderer into the given camera
// session — creates the AVCaptureVideoDataOutput, installs the frames
// delegate on a dedicated serial dispatch queue, and adds the output to the
// session's AVCaptureSession.
135{
136 m_cameraSession = cameraSession;
    // Re-evaluate connection settings (mirroring etc.) whenever the session
    // signals that connections can be configured.
137 connect(m_cameraSession, SIGNAL(readyToConfigureConnections()),
138 this, SLOT(updateCaptureConnection()));
139
140 m_needsHorizontalMirroring = false;
141
142 m_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
143
144 // Configure video output
    // nullptr attributes => serial queue; frames are delivered one at a time.
145 m_delegateQueue = dispatch_queue_create("vf_queue", nullptr);
146 [m_videoDataOutput
147 setSampleBufferDelegate:m_viewfinderFramesDelegate
148 queue:m_delegateQueue];
149
150 [m_cameraSession->captureSession() addOutput:m_videoDataOutput];
151}
152
// Configures mirroring on the video connection for front-facing cameras; if
// the connection cannot mirror for us, records that frames must be mirrored
// in software (m_needsHorizontalMirroring, consumed in handleViewfinderFrame).
153void AVFCameraRenderer::updateCaptureConnection()
154{
155 AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
156 if (connection == nil || !m_cameraSession->videoCaptureDevice())
157 return;
158
159 // Frames of front-facing cameras should be mirrored horizontally (it's the default when using
160 // AVCaptureVideoPreviewLayer but not with AVCaptureVideoDataOutput)
161 if (connection.isVideoMirroringSupported)
162 connection.videoMirrored = m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;
163
164 // If the connection doesn't support mirroring, we'll have to do it ourselves
165 m_needsHorizontalMirroring = !connection.isVideoMirrored
166 && m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;
167
169}
170
// deviceOrientationChanged(angle): maps a device rotation angle (degrees) to
// an AVCaptureVideoOrientation and applies it to the video connection.
// A negative angle means "query the orientation handler for the current one".
172{
173 AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
174 if (connection == nil || !m_cameraSession->videoCaptureDevice())
175 return;
176
177 if (!connection.supportsVideoOrientation)
178 return;
179
180 if (angle < 0)
181 angle = m_orientationHandler.currentOrientation();
182
    // 0 (and any unlisted angle) falls through to portrait; 180 is ignored.
183 AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
184 switch (angle) {
185 default:
186 break;
187 case 90:
188 orientation = AVCaptureVideoOrientationLandscapeRight;
189 break;
190 case 180:
191 // this keeps the last orientation, don't do anything
192 return;
193 case 270:
194 orientation = AVCaptureVideoOrientationLandscapeLeft;
195 break;
196 }
197
198 connection.videoOrientation = orientation;
199}
200
// Can be called from a non-main thread (the capture delegate's dispatch
// queue). Stores the newest frame under m_vfMutex; if no frame was already
// pending, queues handleViewfinderFrame() onto this object's thread so the
// frame is delivered there. A pending frame is simply overwritten, i.e. only
// the latest frame survives if delivery falls behind capture.
203{
205
206 QMutexLocker lock(&m_vfMutex);
207
208 if (!m_lastViewfinderFrame.isValid()) {
        // Resolve the slot once; invoked queued so it runs on the Qt thread.
209 static QMetaMethod handleViewfinderFrameSlot = metaObject()->method(
210 metaObject()->indexOfMethod("handleViewfinderFrame()"));
211
212 handleViewfinderFrameSlot.invoke(this, Qt::QueuedConnection);
213 }
214
215 m_lastViewfinderFrame = frame;
216}
217
// Accessor for the AVCaptureVideoDataOutput owned by this renderer.
218AVCaptureVideoDataOutput *AVFCameraRenderer::videoDataOutput() const
219{
220 return m_videoDataOutput;
221}
222
// Accessor for the Objective-C frames delegate created in the constructor.
224{
225 return m_viewfinderFramesDelegate;
226}
227
// Re-installs this renderer's delegate/queue pair on the data output —
// presumably after another component temporarily replaced the sample-buffer
// delegate (e.g. for recording); TODO confirm against callers.
229{
230 [m_videoDataOutput setSampleBufferDelegate:m_viewfinderFramesDelegate queue:m_delegateQueue];
231}
232
// Slot executed on this object's thread (queued from
// syncHandleViewfinderFrame). Takes the latest pending frame under the mutex,
// clears the pending slot, and delivers the frame to the sink — applying
// software mirroring for front cameras when the capture connection could not
// mirror for us (see updateCaptureConnection).
233void AVFCameraRenderer::handleViewfinderFrame()
234{
    // Hold the lock only while swapping the frame out; delivery happens
    // outside the critical section.
236 {
237 QMutexLocker lock(&m_vfMutex);
238 frame = m_lastViewfinderFrame;
239 m_lastViewfinderFrame = QVideoFrame();
240 }
241
242 if (m_sink && frame.isValid()) {
243 // ### pass format to surface
244 QVideoFrameFormat format = frame.surfaceFormat();
245 if (m_needsHorizontalMirroring)
246 format.setMirrored(true);
247
249 }
250}
251
// setPixelFormat(pixelFormat, colorRange): translates the requested Qt pixel
// format into a CoreVideo pixel format and, if the AVCaptureVideoDataOutput
// supports it, stores it as the output-settings dictionary (m_outputSettings)
// later applied in setOutputSettings(). On the OpenGL RHI backend only BGRA
// is accepted, so the method bails out early there.
254{
255 if (rhi() && rhi()->backend() == QRhi::OpenGLES2) {
256 if (pixelFormat != QVideoFrameFormat::Format_BGRA8888)
257 qWarning() << "OpenGL rhi backend only supports 32BGRA pixel format.";
258 return;
259 }
260
261 // Default to 32BGRA pixel formats on the viewfinder, in case the requested
262 // format can't be used (shouldn't happen unless the developer sets a wrong camera
263 // format on the camera).
264 auto cvPixelFormat = QAVFHelpers::toCVPixelFormat(pixelFormat, colorRange);
265 if (cvPixelFormat == CvPixelFormatInvalid) {
266 cvPixelFormat = kCVPixelFormatType_32BGRA;
        // NOTE(review): message says "ARGB32" but the fallback actually used
        // above is kCVPixelFormatType_32BGRA — the warning text is misleading.
267 qWarning() << "QCamera::setCameraFormat: couldn't convert requested pixel format, using ARGB32";
268 }
269
    // Check the chosen format against what the output advertises as available.
270 bool isSupported = false;
271 NSArray *supportedPixelFormats = m_videoDataOutput.availableVideoCVPixelFormatTypes;
272 for (NSNumber *currentPixelFormat in supportedPixelFormats)
273 {
274 if ([currentPixelFormat unsignedIntValue] == cvPixelFormat) {
275 isSupported = true;
276 break;
277 }
278 }
279
280 if (isSupported) {
281 NSDictionary *outputSettings = @{
282 (NSString *)
283 kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:cvPixelFormat]
284#ifndef Q_OS_IOS // On iOS this key generates a warning about 'unsupported key'.
285 ,
286 (NSString *)kCVPixelBufferMetalCompatibilityKey : @true
287#endif // Q_OS_IOS
288 };
        // Replace any previously stored settings (manual retain/release: this
        // file predates ARC conventions for these members).
290 [m_outputSettings release];
291 m_outputSettings = [[NSDictionary alloc] initWithDictionary:outputSettings];
292 } else {
293 qWarning() << "QCamera::setCameraFormat: requested pixel format not supported. Did you use a camera format from another camera?";
294 }
295}
296
297#include "moc_avfcamerarenderer_p.cpp"
298
void reconfigure() override
void syncHandleViewfinderFrame(const QVideoFrame &frame)
AVFCameraRenderer(QObject *parent=nullptr)
void newViewfinderFrame(const QVideoFrame &frame)
AVCaptureVideoDataOutput * videoDataOutput() const
void setOutputSettings() override
AVFCaptureFramesDelegate * captureDelegate() const
void configureAVCaptureSession(AVFCameraSession *cameraSession)
void setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat, QVideoFrameFormat::ColorRange colorRange)
void deviceOrientationChanged(int angle=-1)
void resetCaptureDelegate() const
AVCaptureDevice * videoCaptureDevice() const
QCameraFormat cameraFormat() const
NSDictionary * m_outputSettings
virtual void setOutputSettings()
AVFVideoSink * m_sink
void setNativeSize(QSize size)
static QVideoFrameFormat::ColorRange getColorRange(const QCameraFormat &format)
\inmodule QtCore
Definition qmetaobject.h:19
bool invoke(QObject *object, Qt::ConnectionType connectionType, QGenericReturnArgument returnValue, QGenericArgument val0=QGenericArgument(nullptr), QGenericArgument val1=QGenericArgument(), QGenericArgument val2=QGenericArgument(), QGenericArgument val3=QGenericArgument(), QGenericArgument val4=QGenericArgument(), QGenericArgument val5=QGenericArgument(), QGenericArgument val6=QGenericArgument(), QGenericArgument val7=QGenericArgument(), QGenericArgument val8=QGenericArgument(), QGenericArgument val9=QGenericArgument()) const
\obsolete [6.5] Please use the variadic overload of this function
\inmodule QtCore
Definition qmutex.h:313
\inmodule QtCore
Definition qobject.h:103
static QMetaObject::Connection connect(const QObject *sender, const char *signal, const QObject *receiver, const char *member, Qt::ConnectionType=Qt::AutoConnection)
\threadsafe
Definition qobject.cpp:2960
virtual void setVideoFrame(const QVideoFrame &frame)
@ OpenGLES2
Definition qrhi.h:1809
\inmodule QtCore
Definition qsize.h:25
The QVideoFrameFormat class specifies the stream format of a video presentation surface.
PixelFormat
Enumerates video data types.
ColorRange
Describes the color range used by the video data.
The QVideoFrame class represents a frame of video data.
Definition qvideoframe.h:27
bool isValid() const
Identifies whether a video frame is valid.
void orientationChanged(int angle)
CvPixelFormat toCVPixelFormat(QVideoFrameFormat::PixelFormat pixFmt, QVideoFrameFormat::ColorRange colorRange)
@ QueuedConnection
constexpr CvPixelFormat CvPixelFormatInvalid
DBusConnection const char DBusError DBusBusType DBusError return DBusConnection DBusHandleMessageFunction void DBusFreeFunction return DBusConnection return DBusConnection return const char DBusError return DBusConnection DBusMessage dbus_uint32_t return DBusConnection dbus_bool_t DBusConnection DBusAddWatchFunction DBusRemoveWatchFunction DBusWatchToggledFunction void DBusFreeFunction return DBusConnection DBusDispatchStatusFunction void DBusFreeFunction DBusTimeout return DBusTimeout return DBusWatch return DBusWatch unsigned int return DBusError const DBusError return const DBusMessage return DBusMessage return DBusMessage return DBusMessage return DBusMessage return DBusMessage return DBusMessageIter int const void return DBusMessageIter DBusMessageIter return DBusMessageIter void DBusMessageIter void int return DBusMessage DBusMessageIter return DBusMessageIter return DBusMessageIter DBusMessageIter const char const char const char const char return DBusMessage return DBusMessage const char return DBusMessage dbus_bool_t return DBusMessage dbus_uint32_t return DBusMessage void
DBusConnection * connection
#define qWarning
Definition qlogging.h:166
#define SLOT(a)
Definition qobjectdefs.h:52
#define SIGNAL(a)
Definition qobjectdefs.h:53
GLenum GLuint buffer
GLfloat angle
GLint GLsizei GLsizei GLenum format
GLuint in
static QT_BEGIN_NAMESPACE void init(QTextBoundaryFinder::BoundaryType type, QStringView str, QCharAttributes *attributes)
#define Q_EMIT
#define Q_UNUSED(x)
obj metaObject() -> className()
QReadWriteLock lock
[0]
QFrame frame
[0]
QSvgRenderer * renderer
[0]