Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those can be found at https://doc.qt.io/.
avfcamerautility.mm
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5#include "avfcameradebug_p.h"
6
7#include <QtCore/qvector.h>
8#include <QtCore/qpair.h>
9#include <private/qmultimediautils_p.h>
10#include <private/qcameradevice_p.h>
11#include "avfvideobuffer_p.h"
12#include "qavfhelpers_p.h"
13
14#include <functional>
15#include <algorithm>
16#include <limits>
17#include <tuple>
18
19QT_BEGIN_NAMESPACE
20
21Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")
22
23AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection)
24{
25 Q_ASSERT(videoConnection);
26
27 AVFPSRange newRange;
28 // "The value in the videoMinFrameDuration is equivalent to the reciprocal
29 // of the maximum framerate, the value in the videoMaxFrameDuration is equivalent
30 // to the reciprocal of the minimum framerate."
31 if (videoConnection.supportsVideoMinFrameDuration) {
32 const CMTime cmMin = videoConnection.videoMinFrameDuration;
33 if (CMTimeCompare(cmMin, kCMTimeInvalid)) { // Has some non-default value:
34 if (const Float64 minSeconds = CMTimeGetSeconds(cmMin))
35 newRange.second = 1. / minSeconds;
36 }
37 }
38
39 if (videoConnection.supportsVideoMaxFrameDuration) {
40 const CMTime cmMax = videoConnection.videoMaxFrameDuration;
41 if (CMTimeCompare(cmMax, kCMTimeInvalid)) {
42 if (const Float64 maxSeconds = CMTimeGetSeconds(cmMax))
43 newRange.first = 1. / maxSeconds;
44 }
45 }
46
47 return newRange;
48}
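// Illustrative sketch, not taken from the listing above (hypothetical helper name,
// relying only on the CoreMedia types already visible in this file): the conversion
// used in qt_connection_framerates is a plain reciprocal, e.g. a videoMinFrameDuration
// of 1/30 s corresponds to a maximum framerate of 30 fps, and kCMTimeInvalid
// (the default) is treated as "no explicit limit".
static inline Float64 frame_duration_to_fps_sketch(CMTime duration)
{
    if (!CMTimeCompare(duration, kCMTimeInvalid)) // still the default value
        return 0.;
    const Float64 seconds = CMTimeGetSeconds(duration);
    return seconds ? 1. / seconds : 0.;
}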
49
50namespace {
51
52inline bool qt_area_sane(const QSize &size)
53{
54 return !size.isNull() && size.isValid()
55 && std::numeric_limits<int>::max() / size.width() >= size.height();
56}
57
58template <template <typename...> class Comp> // std::less or std::greater (or std::equal_to)
59struct ByResolution
60{
61 bool operator() (AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)const
62 {
63 Q_ASSERT(f1 && f2);
64 const QSize r1(qt_device_format_resolution(f1));
65 const QSize r2(qt_device_format_resolution(f2));
66 // use std::tuple for lexicographical sorting:
67 const Comp<std::tuple<int, int>> op = {};
68 return op(std::make_tuple(r1.width(), r1.height()),
69 std::make_tuple(r2.width(), r2.height()));
70 }
71};
72
73struct FormatHasNoFPSRange
74{
75 bool operator() (AVCaptureDeviceFormat *format) const
76 {
77 Q_ASSERT(format);
78 return !format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count;
79 }
80};
81
82Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps)
83{
84 Q_ASSERT(format && format.videoSupportedFrameRateRanges
85 && format.videoSupportedFrameRateRanges.count);
86
87 AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:0];
88 Float64 distance = qAbs(range.maxFrameRate - fps);
89 for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
90 range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
91 distance = qMin(distance, qAbs(range.maxFrameRate - fps));
92 }
93
94 return distance;
95}
96
97} // Unnamed namespace.
98
99AVCaptureDeviceFormat *
100qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice,
101 const QCameraFormat &cameraFormat,
102 const std::function<bool(uint32_t)> &cvFormatValidator)
103{
104 const auto cameraFormatPrivate = QCameraFormatPrivate::handle(cameraFormat);
105 if (!cameraFormatPrivate)
106 return nil;
107
108 const auto requiredCvPixFormat = QAVFHelpers::toCVPixelFormat(cameraFormatPrivate->pixelFormat,
109 cameraFormatPrivate->colorRange);
110
111 if (requiredCvPixFormat == CvPixelFormatInvalid)
112 return nil;
113
114 AVCaptureDeviceFormat *newFormat = nil;
115 Float64 newFormatMaxFrameRate = {};
116 NSArray<AVCaptureDeviceFormat *> *formats = captureDevice.formats;
117 for (AVCaptureDeviceFormat *format in formats) {
118 CMFormatDescriptionRef formatDesc = format.formatDescription;
119 CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
120 FourCharCode cvPixFormat = CMVideoFormatDescriptionGetCodecType(formatDesc);
121
122 if (cvPixFormat != requiredCvPixFormat)
123 continue;
124
125 if (cameraFormatPrivate->resolution != QSize(dim.width, dim.height))
126 continue;
127
128 if (cvFormatValidator && !cvFormatValidator(cvPixFormat))
129 continue;
130
131 for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
132 if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate
133 && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate
134 && newFormatMaxFrameRate < frameRateRange.maxFrameRate) {
135 newFormat = format;
136 newFormatMaxFrameRate = frameRateRange.maxFrameRate;
137 }
138 }
139 }
140 return newFormat;
141}
142
143QVector<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
144{
145 // 'filter' is the format we prefer if we have duplicates.
146 Q_ASSERT(captureDevice);
147
148 QVector<AVCaptureDeviceFormat *> formats;
149
150 if (!captureDevice.formats || !captureDevice.formats.count)
151 return formats;
152
153 formats.reserve(captureDevice.formats.count);
154 for (AVCaptureDeviceFormat *format in captureDevice.formats) {
155 const QSize resolution(qt_device_format_resolution(format));
156 if (resolution.isNull() || !resolution.isValid())
157 continue;
158 formats << format;
159 }
160
161 if (!formats.size())
162 return formats;
163
164 std::sort(formats.begin(), formats.end(), ByResolution<std::less>());
165
166 QSize size(qt_device_format_resolution(formats[0]));
167 FourCharCode codec = CMVideoFormatDescriptionGetCodecType(formats[0].formatDescription);
168 int last = 0;
169 for (int i = 1; i < formats.size(); ++i) {
170 const QSize nextSize(qt_device_format_resolution(formats[i]));
171 if (nextSize == size) {
172 if (codec == filter)
173 continue;
174 formats[last] = formats[i];
175 } else {
176 ++last;
177 formats[last] = formats[i];
178 size = nextSize;
179 }
180 codec = CMVideoFormatDescriptionGetCodecType(formats[i].formatDescription);
181 }
182 formats.resize(last + 1);
183
184 return formats;
185}
186
187QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
188{
189 if (!format || !format.formatDescription)
190 return QSize();
191
192 const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
193 return QSize(res.width, res.height);
194}
195
196QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
197{
198 Q_ASSERT(format);
199 QSize res;
200#if defined(Q_OS_IOS)
201 const CMVideoDimensions hrDim(format.highResolutionStillImageDimensions);
202 res.setWidth(hrDim.width);
203 res.setHeight(hrDim.height);
204#endif
205 return res;
206}
207
208QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format)
209{
210 Q_ASSERT(format);
211
212 QVector<AVFPSRange> qtRanges;
213
214 if (!format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count)
215 return qtRanges;
216
217 qtRanges.reserve(format.videoSupportedFrameRateRanges.count);
218 for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
219 qtRanges << AVFPSRange(range.minFrameRate, range.maxFrameRate);
220
221 return qtRanges;
222}
223
224QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
225{
226 Q_ASSERT(format);
227
228 if (!format.formatDescription) {
229 qCDebug(qLcCamera) << Q_FUNC_INFO << "no format description found";
230 return QSize();
231 }
232
233 const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
234 const CGSize resPAR = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription, true, false);
235
236 if (qAbs(resPAR.width - res.width) < 1.) {
237 // "Pixel aspect ratio is used to adjust the width, leaving the height alone."
238 return QSize(1, 1);
239 }
240
241 if (!res.width || !resPAR.width)
242 return QSize();
243
244 auto frac = qRealToFraction(resPAR.width > res.width ? res.width / qreal(resPAR.width)
245 : resPAR.width / qreal(res.width));
246
247 return QSize(frac.numerator, frac.denominator);
248}
249
250AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice,
251 const QSize &request,
252 FourCharCode filter,
253 bool stillImage)
254{
255 Q_ASSERT(captureDevice);
256 Q_ASSERT(!request.isNull() && request.isValid());
257
258 if (!captureDevice.formats || !captureDevice.formats.count)
259 return nullptr;
260
261 QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice, filter));
262
263 for (int i = 0; i < formats.size(); ++i) {
264 AVCaptureDeviceFormat *format = formats[i];
265 if (qt_device_format_resolution(format) == request)
266 return format;
267 // iOS only (still images).
268 if (stillImage && qt_device_format_high_resolution(format) == request)
269 return format;
270 }
271
272 if (!qt_area_sane(request))
273 return nullptr;
274
275 typedef QPair<QSize, AVCaptureDeviceFormat *> FormatPair;
276
277 QVector<FormatPair> pairs; // default|HR sizes
278 pairs.reserve(formats.size());
279
280 for (int i = 0; i < formats.size(); ++i) {
281 AVCaptureDeviceFormat *format = formats[i];
282 const QSize res(qt_device_format_resolution(format));
283 if (!res.isNull() && res.isValid() && qt_area_sane(res))
284 pairs << FormatPair(res, format);
285 const QSize highRes(qt_device_format_high_resolution(format));
286 if (stillImage && !highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
287 pairs << FormatPair(highRes, format);
288 }
289
290 if (!pairs.size())
291 return nullptr;
292
293 AVCaptureDeviceFormat *best = pairs[0].second;
294 QSize next(pairs[0].first);
295 int wDiff = qAbs(request.width() - next.width());
296 int hDiff = qAbs(request.height() - next.height());
297 const int area = request.width() * request.height();
298 int areaDiff = qAbs(area - next.width() * next.height());
299 for (int i = 1; i < pairs.size(); ++i) {
300 next = pairs[i].first;
301 const int newWDiff = qAbs(next.width() - request.width());
302 const int newHDiff = qAbs(next.height() - request.height());
303 const int newAreaDiff = qAbs(area - next.width() * next.height());
304
305 if ((newWDiff < wDiff && newHDiff < hDiff)
306 || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
307 wDiff = newWDiff;
308 hDiff = newHDiff;
309 best = pairs[i].second;
310 areaDiff = newAreaDiff;
311 }
312 }
313
314 return best;
315}
316
317AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
318 FourCharCode filter,
319 Float64 fps)
320{
321 Q_ASSERT(captureDevice);
322 Q_ASSERT(fps > 0.);
323
324 const qreal epsilon = 0.1;
325
326 QVector<AVCaptureDeviceFormat *>sorted(qt_unique_device_formats(captureDevice, filter));
327 // Sort formats by their resolution in decreasing order:
328 std::sort(sorted.begin(), sorted.end(), ByResolution<std::greater>());
329 // We can use only formats with framerate ranges:
330 sorted.erase(std::remove_if(sorted.begin(), sorted.end(), FormatHasNoFPSRange()), sorted.end());
331
332 if (!sorted.size())
333 return nil;
334
335 for (int i = 0; i < sorted.size(); ++i) {
336 AVCaptureDeviceFormat *format = sorted[i];
337 for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
338 if (range.maxFrameRate - range.minFrameRate < epsilon) {
339 // On OS X ranges are points (built-in camera).
340 if (qAbs(fps - range.maxFrameRate) < epsilon)
341 return format;
342 }
343
344 if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
345 return format;
346 }
347 }
348
349 Float64 distance = qt_find_min_framerate_distance(sorted[0], fps);
350 AVCaptureDeviceFormat *match = sorted[0];
351 for (int i = 1; i < sorted.size(); ++i) {
352 const Float64 newDistance = qt_find_min_framerate_distance(sorted[i], fps);
353 if (newDistance < distance) {
354 distance = newDistance;
355 match = sorted[i];
356 }
357 }
358
359 return match;
360}
361
362AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
363{
364 Q_ASSERT(format && format.videoSupportedFrameRateRanges
365 && format.videoSupportedFrameRateRanges.count);
366
367 const qreal epsilon = 0.1;
368
369 for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
370 if (range.maxFrameRate - range.minFrameRate < epsilon) {
371 // On OS X ranges are points (built-in camera).
372 if (qAbs(fps - range.maxFrameRate) < epsilon)
373 return range;
374 }
375
376 if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
377 return range;
378 }
379
380 AVFrameRateRange *match = [format.videoSupportedFrameRateRanges objectAtIndex:0];
381 Float64 distance = qAbs(match.maxFrameRate - fps);
382 for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
383 AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
384 const Float64 newDistance = qAbs(range.maxFrameRate - fps);
385 if (newDistance < distance) {
386 distance = newDistance;
387 match = range;
388 }
389 }
390
391 return match;
392}
393
394bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
395{
396 if (format && fps > qreal(0)) {
397 const qreal epsilon = 0.1;
398 for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
399 if (fps >= range.minFrameRate - epsilon && fps <= range.maxFrameRate + epsilon)
400 return true;
401 }
402 }
403
404 return false;
405}
406
407bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
408{
409 if (f1 == f2)
410 return true;
411
412 if (![f1.mediaType isEqualToString:f2.mediaType])
413 return false;
414
415 return CMFormatDescriptionEqual(f1.formatDescription, f2.formatDescription);
416}
417
418bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
419{
420 static bool firstSet = true;
421
422 if (!captureDevice || !format)
423 return false;
424
425 if (qt_formats_are_equal(captureDevice.activeFormat, format)) {
426 if (firstSet) {
427 // The capture device format is persistent. The first time we set a format, report that
428 // it changed even if the formats are the same.
429 // This prevents the session from resetting the format to the default value.
430 firstSet = false;
431 return true;
432 }
433 return false;
434 }
435
436 firstSet = false;
437
438 const AVFConfigurationLock lock(captureDevice);
439 if (!lock) {
440 qWarning("Failed to set active format (lock failed)");
441 return false;
442 }
443
444 // Changing the activeFormat resets the frame rate.
445 AVFPSRange fps;
446 if (preserveFps)
447 fps = qt_current_framerates(captureDevice, nil);
448
449 captureDevice.activeFormat = format;
450
451 if (preserveFps)
452 qt_set_framerate_limits(captureDevice, nil, fps.first, fps.second);
453
454 return true;
455}
456
457void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
458{
459 Q_ASSERT(videoConnection);
460
461 if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
462 qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
463 << minFPS << maxFPS;
464 return;
465 }
466
467 CMTime minDuration = kCMTimeInvalid;
468 if (maxFPS > 0.) {
469 if (!videoConnection.supportsVideoMinFrameDuration)
470 qCDebug(qLcCamera) << Q_FUNC_INFO << "maximum framerate is not supported";
471 else
472 minDuration = CMTimeMake(1, maxFPS);
473 }
474 if (videoConnection.supportsVideoMinFrameDuration)
475 videoConnection.videoMinFrameDuration = minDuration;
476
477 CMTime maxDuration = kCMTimeInvalid;
478 if (minFPS > 0.) {
479 if (!videoConnection.supportsVideoMaxFrameDuration)
480 qCDebug(qLcCamera) << Q_FUNC_INFO << "minimum framerate is not supported";
481 else
482 maxDuration = CMTimeMake(1, minFPS);
483 }
484 if (videoConnection.supportsVideoMaxFrameDuration)
485 videoConnection.videoMaxFrameDuration = maxDuration;
486}
487
488CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
489{
490 Q_ASSERT(range);
491 Q_ASSERT(fps > 0.);
492
493 if (range.maxFrameRate - range.minFrameRate < 0.1) {
494 // Can happen on OS X.
495 return range.minFrameDuration;
496 }
497
498 if (fps <= range.minFrameRate)
499 return range.maxFrameDuration;
500 if (fps >= range.maxFrameRate)
501 return range.minFrameDuration;
502
503 auto frac = qRealToFraction(1. / fps);
504 return CMTimeMake(frac.numerator, frac.denominator);
505}
506
507void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal maxFPS)
508{
509 Q_ASSERT(captureDevice);
510 if (!captureDevice.activeFormat) {
511 qCDebug(qLcCamera) << Q_FUNC_INFO << "no active capture device format";
512 return;
513 }
514
515 if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
516 qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
517 << minFPS << maxFPS;
518 return;
519 }
520
521 CMTime minFrameDuration = kCMTimeInvalid;
522 CMTime maxFrameDuration = kCMTimeInvalid;
523 if (maxFPS || minFPS) {
524 AVFrameRateRange *range = qt_find_supported_framerate_range(captureDevice.activeFormat,
525 maxFPS ? maxFPS : minFPS);
526 if (!range) {
527 qCDebug(qLcCamera) << Q_FUNC_INFO << "no framerate range found, (min, max):"
528 << minFPS << maxFPS;
529 return;
530 }
531
532 if (maxFPS)
533 minFrameDuration = qt_adjusted_frame_duration(range, maxFPS);
534 if (minFPS)
535 maxFrameDuration = qt_adjusted_frame_duration(range, minFPS);
536 }
537
538 const AVFConfigurationLock lock(captureDevice);
539 if (!lock) {
540 qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
541 return;
542 }
543
544 // While Apple's docs say kCMTimeInvalid will result in the default
545 // settings for this format, on OS X setting kCMTimeInvalid raises a runtime
546 // exception:
547 // "The activeVideoMinFrameDuration passed is not supported by the device."
548 // Instead, use the first item in the supported frame rates.
549#ifdef Q_OS_IOS
550 [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
551 [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
552#elif defined(Q_OS_MACOS)
553 if (CMTimeCompare(minFrameDuration, kCMTimeInvalid) == 0
554 && CMTimeCompare(maxFrameDuration, kCMTimeInvalid) == 0) {
555 AVFrameRateRange *range = captureDevice.activeFormat.videoSupportedFrameRateRanges.firstObject;
556 minFrameDuration = range.minFrameDuration;
557 maxFrameDuration = range.maxFrameDuration;
558 }
559
560 if (CMTimeCompare(minFrameDuration, kCMTimeInvalid))
561 [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
562
563 if (CMTimeCompare(maxFrameDuration, kCMTimeInvalid))
564 [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
565#endif // Q_OS_MACOS
566}
567
568void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
569 qreal minFPS, qreal maxFPS)
570{
571 Q_UNUSED(videoConnection);
572 Q_ASSERT(captureDevice);
573 qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
574}
575
576AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
577{
578 Q_UNUSED(videoConnection);
579 Q_ASSERT(captureDevice);
580
581 AVFPSRange fps;
582 const CMTime minDuration = captureDevice.activeVideoMinFrameDuration;
583 if (CMTimeCompare(minDuration, kCMTimeInvalid)) {
584 if (const Float64 minSeconds = CMTimeGetSeconds(minDuration))
585 fps.second = 1. / minSeconds; // Max FPS = 1 / MinDuration.
586 }
587
588 const CMTime maxDuration = captureDevice.activeVideoMaxFrameDuration;
589 if (CMTimeCompare(maxDuration, kCMTimeInvalid)) {
590 if (const Float64 maxSeconds = CMTimeGetSeconds(maxDuration))
591 fps.first = 1. / maxSeconds; // Min FPS = 1 / MaxDuration.
592 }
593
594 return fps;
595}
596
597QList<AudioValueRange> qt_supported_sample_rates_for_format(int codecId)
598{
599 QList<AudioValueRange> result;
600 UInt32 format = codecId;
601 UInt32 size;
602 OSStatus err = AudioFormatGetPropertyInfo(
603 kAudioFormatProperty_AvailableEncodeSampleRates,
604 sizeof(format),
605 &format,
606 &size);
607
608 if (err != noErr)
609 return result;
610
611 UInt32 numRanges = size / sizeof(AudioValueRange);
612 AudioValueRange sampleRanges[numRanges];
613
614 err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeSampleRates,
615 sizeof(format),
616 &format,
617 &size,
618 sampleRanges);
619 if (err != noErr)
620 return result;
621
622 for (UInt32 i = 0; i < numRanges; i++)
623 result << sampleRanges[i];
624
625 return result;
626}
627
628QList<AudioValueRange> qt_supported_bit_rates_for_format(int codecId)
629{
630 QList<AudioValueRange> result;
631 UInt32 format = codecId;
632 UInt32 size;
633 OSStatus err = AudioFormatGetPropertyInfo(
634 kAudioFormatProperty_AvailableEncodeBitRates,
635 sizeof(format),
636 &format,
637 &size);
638
639 if (err != noErr)
640 return result;
641
642 UInt32 numRanges = size / sizeof(AudioValueRange);
643 AudioValueRange bitRanges[numRanges];
644
645 err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeBitRates,
646 sizeof(format),
647 &format,
648 &size,
649 bitRanges);
650 if (err != noErr)
651 return result;
652
653 for (UInt32 i = 0; i < numRanges; i++)
654 result << bitRanges[i];
655
656 return result;
657}
658
659std::optional<QList<UInt32>> qt_supported_channel_counts_for_format(int codecId)
660{
661 QList<UInt32> result;
662 AudioStreamBasicDescription sf = {};
663 sf.mFormatID = codecId;
664 UInt32 size;
665 OSStatus err = AudioFormatGetPropertyInfo(
666 kAudioFormatProperty_AvailableEncodeNumberChannels,
667 sizeof(sf),
668 &sf,
669 &size);
670
671 if (err != noErr)
672 return result;
673
674 // From Apple's docs:
675 // A value of 0xFFFFFFFF indicates that any number of channels may be encoded.
676 if (int(size) == -1)
677 return std::nullopt;
678
679 UInt32 numCounts = size / sizeof(UInt32);
680 UInt32 channelCounts[numCounts];
681
682 err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeNumberChannels,
683 sizeof(sf),
684 &sf,
685 &size,
686 channelCounts);
687 if (err != noErr)
688 return result;
689
690 for (UInt32 i = 0; i < numCounts; i++)
691 result << channelCounts[i];
692
693 return result;
694}
695
696QList<UInt32> qt_supported_channel_layout_tags_for_format(int codecId, int noChannels)
697{
698 QList<UInt32> result;
699 AudioStreamBasicDescription sf = {};
700 sf.mFormatID = codecId;
701 sf.mChannelsPerFrame = noChannels;
702 UInt32 size;
703 OSStatus err = AudioFormatGetPropertyInfo(
704 kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
705 sizeof(sf),
706 &sf,
707 &size);
708
709 if (err != noErr)
710 return result;
711
712 UInt32 noTags = (UInt32)size / sizeof(UInt32);
713 AudioChannelLayoutTag tagsArr[noTags];
714
715 err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
716 sizeof(sf),
717 &sf,
718 &size,
719 tagsArr);
720 if (err != noErr)
721 return result;
722
723 for (UInt32 i = 0; i < noTags; i++)
724 result << tagsArr[i];
725
726 return result;
727}
728
729QT_END_NAMESPACE
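Taken together, a caller would typically pick a device format with one of the matching helpers above and then apply it. A minimal, hypothetical usage sketch (assuming this file's includes; error handling and AVCaptureSession setup omitted):

    // Hypothetical snippet; not part of avfcamerautility.mm.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (device) {
        // Prefer a 1280x720 format; no FourCC preference (0), video (not still-image) use case.
        AVCaptureDeviceFormat *format =
            qt_find_best_resolution_match(device, QSize(1280, 720), 0, /*stillImage=*/false);
        if (format && qt_set_active_format(device, format, /*preserveFps=*/false)) {
            // Request 30 fps; the helpers clamp to a supported frame rate range.
            qt_set_framerate_limits(device, nil, 30., 30.);
        }
    }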