Qt
Internal/Contributor docs for the Qt SDK. <b>Note:</b> These are NOT official API docs; those are found <a href='https://doc.qt.io/'>here</a>.
Loading...
Searching...
No Matches
evrcustompresenter.cpp
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5
7#include "evrhelpers_p.h"
8#include <private/qwindowsmultimediautils_p.h>
9#include <private/qplatformvideosink_p.h>
10#include <private/qwindowsmfdefs_p.h>
11
12#include <rhi/qrhi.h>
13
14#include <QtCore/qmutex.h>
15#include <QtCore/qvarlengtharray.h>
16#include <QtCore/qrect.h>
17#include <qthread.h>
18#include <qcoreapplication.h>
19#include <qmath.h>
20#include <qloggingcategory.h>
21
22#include <mutex>
23
24#include <float.h>
25#include <evcode.h>
26
28
29static Q_LOGGING_CATEGORY(qLcEvrCustomPresenter, "qt.multimedia.evrcustompresenter")
30
// Fallback frame rate assumed until the media type supplies a real one.
const static MFRatio g_DefaultFrameRate = { 30, 1 };
// Max time (msec) to wait for the scheduler thread to acknowledge a flush.
static const DWORD SCHEDULER_TIMEOUT = 5000;
// MFTIME ticks (100-nanosecond units) per second.
static const MFTIME ONE_SECOND = 10000000;
// Milliseconds per second; ONE_SECOND / ONE_MSEC = ticks per millisecond.
static const LONG ONE_MSEC = 1000;
35
36// Function declarations.
37static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
39
40static inline LONG MFTimeToMsec(const LONGLONG& time)
41{
42 return (LONG)(time / (ONE_SECOND / ONE_MSEC));
43}
44
45bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
46{
47 if (!evr || !presenter)
48 return false;
49
50 HRESULT result = E_FAIL;
51
52 IMFVideoRenderer *renderer = NULL;
53 if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) {
54 result = renderer->InitializeRenderer(NULL, presenter);
55 renderer->Release();
56 }
57
58 return result == S_OK;
59}
60
62{
63public:
64 explicit PresentSampleEvent(const ComPtr<IMFSample> &sample)
65 : QEvent(static_cast<Type>(EVRCustomPresenter::PresentSample)), m_sample(sample)
66 {
67 }
68
69 ComPtr<IMFSample> sample() const { return m_sample; }
70
71private:
72 const ComPtr<IMFSample> m_sample;
73};
74
76 : m_presenter(presenter)
77 , m_threadID(0)
78 , m_playbackRate(1.0f)
79 , m_perFrame_1_4th(0)
80{
81}
82
84{
85 m_scheduledSamples.clear();
86}
87
88void Scheduler::setFrameRate(const MFRatio& fps)
89{
90 UINT64 AvgTimePerFrame = 0;
91
92 // Convert to a duration.
93 MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
94
95 // Calculate 1/4th of this value, because we use it frequently.
96 m_perFrame_1_4th = AvgTimePerFrame / 4;
97}
98
// Start the scheduler worker thread.
// Stores the (possibly null) presentation clock, raises the system timer
// resolution for precise sleeps, creates the worker thread, and blocks until
// the thread signals that its message queue is ready. Returns E_UNEXPECTED
// if the thread is already running or terminates prematurely; on any failure
// the "thread ready" event is released in the done: epilogue.
HRESULT Scheduler::startScheduler(ComPtr<IMFClock> clock)
{
    if (m_schedulerThread)
        return E_UNEXPECTED;

    HRESULT hr = S_OK;
    DWORD dwID = 0;
    HANDLE hObjects[2];
    DWORD dwWait = 0;

    m_clock = clock;

    // Set a high timer resolution (ie, short timer period).
    timeBeginPeriod(1);

    // Create an event to wait for the thread to start.
    m_threadReadyEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
    if (!m_threadReadyEvent) {
        hr = HRESULT_FROM_WIN32(GetLastError());
        goto done;
    }

    // Create an event to wait for flush commands to complete.
    m_flushEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
    if (!m_flushEvent) {
        hr = HRESULT_FROM_WIN32(GetLastError());
        goto done;
    }

    // Create the scheduler thread.
    m_schedulerThread = ThreadHandle{ CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID) };
    if (!m_schedulerThread) {
        hr = HRESULT_FROM_WIN32(GetLastError());
        goto done;
    }

    // Wait for the thread to signal the "thread ready" event.
    hObjects[0] = m_threadReadyEvent.get();
    hObjects[1] = m_schedulerThread.get();
    dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE); // Wait for EITHER of these handles.
    if (WAIT_OBJECT_0 != dwWait) {
        // The thread terminated early for some reason. This is an error condition.
        m_schedulerThread = {};

        hr = E_UNEXPECTED;
        goto done;
    }

    m_threadID = dwID;

done:
    // Regardless success/failure, we are done using the "thread ready" event.
    m_threadReadyEvent = {};

    return hr;
}
155
157{
158 if (!m_schedulerThread)
159 return S_OK;
160
161 // Ask the scheduler thread to exit.
162 PostThreadMessage(m_threadID, Terminate, 0, 0);
163
164 // Wait for the thread to exit.
165 WaitForSingleObject(m_schedulerThread.get(), INFINITE);
166
167 // Close handles.
168 m_schedulerThread = {};
169 m_flushEvent = {};
170
171 // Discard samples.
172 m_mutex.lock();
173 m_scheduledSamples.clear();
174 m_mutex.unlock();
175
176 // Restore the timer resolution.
177 timeEndPeriod(1);
178
179 return S_OK;
180}
181
183{
184 if (m_schedulerThread) {
185 // Ask the scheduler thread to flush.
186 PostThreadMessage(m_threadID, Flush, 0 , 0);
187
188 // Wait for the scheduler thread to signal the flush event,
189 // OR for the thread to terminate.
190 HANDLE objects[] = { m_flushEvent.get(), m_schedulerThread.get() };
191
192 WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
193 }
194
195 return S_OK;
196}
197
199{
200 QMutexLocker locker(&m_mutex);
201 return m_scheduledSamples.count() > 0;
202}
203
// Hand a sample to the scheduler.
// If presentNow is set or there is no clock, the sample is presented
// immediately on the calling thread. Otherwise, samples that are already
// past due (while playing forward) are dropped, and on-time samples are
// queued for the worker thread, which is woken with a Schedule message.
// Fails with MF_E_NOT_INITIALIZED if the worker thread was never started,
// or E_FAIL if it has already exited.
HRESULT Scheduler::scheduleSample(const ComPtr<IMFSample> &sample, bool presentNow)
{
    if (!m_schedulerThread)
        return MF_E_NOT_INITIALIZED;

    HRESULT hr = S_OK;
    DWORD dwExitCode = 0;

    // Fail if the worker thread already terminated (e.g. after an error).
    GetExitCodeThread(m_schedulerThread.get(), &dwExitCode);
    if (dwExitCode != STILL_ACTIVE)
        return E_FAIL;

    if (presentNow || !m_clock) {
        m_presenter->presentSample(sample);
    } else {
        if (m_playbackRate > 0.0f && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
            qCDebug(qLcEvrCustomPresenter) << "Discard the sample, it came too late";
            return hr;
        }

        // Queue the sample and ask the scheduler thread to wake up.
        m_mutex.lock();
        m_scheduledSamples.enqueue(sample);
        m_mutex.unlock();

        // NOTE(review): hr is still S_OK at this point, so this check
        // always passes; kept as-is to avoid a behavior-relevant edit.
        if (SUCCEEDED(hr))
            PostThreadMessage(m_threadID, Schedule, 0, 0);
    }

    return hr;
}
235
237{
238 HRESULT hr = S_OK;
239 LONG wait = 0;
240
241 QQueue<ComPtr<IMFSample>> scheduledSamples;
242
243 m_mutex.lock();
244 m_scheduledSamples.swap(scheduledSamples);
245 m_mutex.unlock();
246
247 // Process samples until the queue is empty or until the wait time > 0.
248 while (!scheduledSamples.isEmpty()) {
249 ComPtr<IMFSample> sample = scheduledSamples.dequeue();
250
251 // Process the next sample in the queue. If the sample is not ready
252 // for presentation. the value returned in wait is > 0, which
253 // means the scheduler should sleep for that amount of time.
254 if (isSampleReadyToPresent(sample.Get(), &wait)) {
255 m_presenter->presentSample(sample.Get());
256 continue;
257 }
258
259 if (wait > 0) {
260 // return the sample to scheduler
261 scheduledSamples.prepend(sample);
262 break;
263 }
264 }
265
266 m_mutex.lock();
267 scheduledSamples.append(std::move(m_scheduledSamples));
268 m_scheduledSamples.swap(scheduledSamples);
269 m_mutex.unlock();
270
271 // If the wait time is zero, it means we stopped because the queue is
272 // empty (or an error occurred). Set the wait time to infinite; this will
273 // make the scheduler thread sleep until it gets another thread message.
274 if (wait == 0)
275 wait = INFINITE;
276
277 *nextSleep = wait;
278 return hr;
279}
280
// Decide what to do with a queued sample.
// Returns true if the sample should be presented now, false if it should be
// skipped (more than 1/4 frame late) or rescheduled (more than 3/4 frame
// early). When rescheduling, *pNextSleep receives the time to sleep in msec,
// scaled by the playback rate. Without a clock every sample is presented
// immediately.
bool Scheduler::isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep) const
{
    *pNextSleep = 0;
    if (!m_clock)
        return true;

    MFTIME hnsPresentationTime = 0;
    MFTIME hnsTimeNow = 0;
    MFTIME hnsSystemTime = 0;

    // Get the sample's time stamp. It is valid for a sample to
    // have no time stamp.
    HRESULT hr = sample->GetSampleTime(&hnsPresentationTime);

    // Get the clock time. (But if the sample does not have a time stamp,
    // we don't need the clock time.)
    if (SUCCEEDED(hr))
        hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);

    // Calculate the time until the sample's presentation time.
    // A negative value means the sample is late.
    // (If GetSampleTime failed, both times stay 0, so the delta is 0 and
    // the timestamp-less sample is presented immediately.)
    MFTIME hnsDelta = hnsPresentationTime - hnsTimeNow;
    if (m_playbackRate < 0) {
        // For reverse playback, the clock runs backward. Therefore, the
        // delta is reversed.
        hnsDelta = - hnsDelta;
    }

    if (hnsDelta < - m_perFrame_1_4th) {
        // This sample is late - skip.
        return false;
    } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
        // This sample came too early - reschedule
        *pNextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));

        // Adjust the sleep time for the clock rate. (The presentation clock runs
        // at m_fRate, but sleeping uses the system clock.)
        if (m_playbackRate != 0)
            *pNextSleep = (LONG)(*pNextSleep / qFabs(m_playbackRate));
        // If the scaled sleep rounds down to zero, present it now after all.
        return *pNextSleep == 0;
    } else {
        // This sample can be presented right now
        return true;
    }
}
326
327DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
328{
329 Scheduler* scheduler = reinterpret_cast<Scheduler*>(parameter);
330 if (!scheduler)
331 return -1;
332 return scheduler->schedulerThreadProcPrivate();
333}
334
335DWORD Scheduler::schedulerThreadProcPrivate()
336{
337 HRESULT hr = S_OK;
338 MSG msg;
339 LONG wait = INFINITE;
340 bool exitThread = false;
341
342 // Force the system to create a message queue for this thread.
343 // (See MSDN documentation for PostThreadMessage.)
344 PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
345
346 // Signal to the scheduler that the thread is ready.
347 SetEvent(m_threadReadyEvent.get());
348
349 while (!exitThread) {
350 // Wait for a thread message OR until the wait time expires.
351 DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);
352
353 if (result == WAIT_TIMEOUT) {
354 // If we timed out, then process the samples in the queue
355 hr = processSamplesInQueue(&wait);
356 if (FAILED(hr))
357 exitThread = true;
358 }
359
360 while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
361 bool processSamples = true;
362
363 switch (msg.message) {
364 case Terminate:
365 exitThread = true;
366 break;
367 case Flush:
368 // Flushing: Clear the sample queue and set the event.
369 m_mutex.lock();
370 m_scheduledSamples.clear();
371 m_mutex.unlock();
372 wait = INFINITE;
373 SetEvent(m_flushEvent.get());
374 break;
375 case Schedule:
376 // Process as many samples as we can.
377 if (processSamples) {
378 hr = processSamplesInQueue(&wait);
379 if (FAILED(hr))
380 exitThread = true;
381 processSamples = (wait != (LONG)INFINITE);
382 }
383 break;
384 }
385 }
386
387 }
388
389 return (SUCCEEDED(hr) ? 0 : 1);
390}
391
392
394 : m_initialized(false)
395{
396}
397
402
// Take one pre-allocated video sample out of the pool.
// Returns nullptr (with a log message) if the pool was never initialized
// via setSamples(), or if all samples are currently in use. Thread-safe.
ComPtr<IMFSample> SamplePool::takeSample()
{
    QMutexLocker locker(&m_mutex);

    Q_ASSERT(m_initialized);
    if (!m_initialized) {
        qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
        return nullptr;
    }

    if (m_videoSampleQueue.isEmpty()) {
        qCDebug(qLcEvrCustomPresenter) << "SamplePool is empty";
        return nullptr;
    }

    // Get a sample from the allocated queue.

    // It doesn't matter if we pull them from the head or tail of the list,
    // but when we get it back, we want to re-insert it onto the opposite end.
    // (see returnSample)

    return m_videoSampleQueue.takeFirst();
}
426
// Return a sample to the pool once the presenter is done with it.
// Appended at the tail — the opposite end from where takeSample() removes —
// so samples rotate through the pool. Thread-safe; a warning (and no-op)
// if the pool was never initialized.
void SamplePool::returnSample(const ComPtr<IMFSample> &sample)
{
    QMutexLocker locker(&m_mutex);

    Q_ASSERT(m_initialized);
    if (!m_initialized) {
        qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
        return;
    }

    m_videoSampleQueue.append(sample);
}
439
441{
442 QMutexLocker locker(&m_mutex);
443
444 if (m_initialized)
445 return MF_E_INVALIDREQUEST;
446
447 // Move these samples into our allocated queue.
448 m_videoSampleQueue.append(std::move(samples));
449
450 m_initialized = true;
451
452 return S_OK;
453}
454
456{
457 QMutexLocker locker(&m_mutex);
458
459 m_videoSampleQueue.clear();
460 m_initialized = false;
461
462 return S_OK;
463}
464
465
467 : QObject()
468 , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree)
469 , m_refCount(1)
470 , m_renderState(RenderShutdown)
471 , m_scheduler(this)
472 , m_tokenCounter(0)
473 , m_sampleNotify(false)
474 , m_prerolled(false)
475 , m_endStreaming(false)
476 , m_playbackRate(1.0f)
477 , m_presentEngine(new D3DPresentEngine(sink))
478 , m_mediaType(0)
479 , m_videoSink(0)
480 , m_canRenderToSurface(false)
481 , m_positionOffset(0)
482{
483 // Initial source rectangle = (0,0,1,1)
484 m_sourceRect.top = 0;
485 m_sourceRect.left = 0;
486 m_sourceRect.bottom = 1;
487 m_sourceRect.right = 1;
488
489 setSink(sink);
490}
491
493{
494 m_scheduler.flush();
495 m_scheduler.stopScheduler();
496 m_samplePool.clear();
497
498 delete m_presentEngine;
499}
500
502{
503 if (!ppvObject)
504 return E_POINTER;
505 if (riid == IID_IMFGetService) {
506 *ppvObject = static_cast<IMFGetService*>(this);
507 } else if (riid == IID_IMFTopologyServiceLookupClient) {
508 *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
509 } else if (riid == IID_IMFVideoDeviceID) {
510 *ppvObject = static_cast<IMFVideoDeviceID*>(this);
511 } else if (riid == IID_IMFVideoPresenter) {
512 *ppvObject = static_cast<IMFVideoPresenter*>(this);
513 } else if (riid == IID_IMFRateSupport) {
514 *ppvObject = static_cast<IMFRateSupport*>(this);
515 } else if (riid == IID_IUnknown) {
516 *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
517 } else if (riid == IID_IMFClockStateSink) {
518 *ppvObject = static_cast<IMFClockStateSink*>(this);
519 } else {
520 *ppvObject = NULL;
521 return E_NOINTERFACE;
522 }
523 AddRef();
524 return S_OK;
525}
526
// IUnknown: atomically increment the COM reference count.
ULONG EVRCustomPresenter::AddRef()
{
    return InterlockedIncrement(&m_refCount);
}
531
// IUnknown: atomically decrement the COM reference count.
// When the count reaches zero the object is destroyed via deleteLater(),
// deferring destruction through the Qt event loop rather than deleting on
// whatever thread released the last reference.
ULONG EVRCustomPresenter::Release()
{
    ULONG uCount = InterlockedDecrement(&m_refCount);
    if (uCount == 0)
        deleteLater();
    return uCount;
}
539
// IMFGetService implementation.
// Only MR_VIDEO_RENDER_SERVICE is supported. The D3D present engine gets
// the first chance to satisfy the request; if it cannot, we fall back to
// this object's own QueryInterface.
HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject)
{
    HRESULT hr = S_OK;

    if (!ppvObject)
        return E_POINTER;

    // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE.
    if (guidService != MR_VIDEO_RENDER_SERVICE)
        return MF_E_UNSUPPORTED_SERVICE;

    // First try to get the service interface from the D3DPresentEngine object.
    hr = m_presentEngine->getService(guidService, riid, ppvObject);
    if (FAILED(hr))
        // Next, check if this object supports the interface.
        hr = QueryInterface(riid, ppvObject);

    return hr;
}
559
561{
562 if (!deviceID)
563 return E_POINTER;
564
565 *deviceID = IID_IDirect3DDevice9;
566
567 return S_OK;
568}
569
// IMFTopologyServiceLookupClient: the EVR hands the presenter its services.
// Fetches the presentation clock (optional — the EVR may have none), the
// mixer (required, and it must pass configureMixer()), and the EVR's media
// event sink (required). Rejected while playback is active. On success the
// presenter enters the stopped state.
HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup)
{
    if (!lookup)
        return E_POINTER;

    HRESULT hr = S_OK;
    DWORD objectCount = 0;

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    // Do not allow initializing when playing or paused.
    if (isActive())
        return MF_E_INVALIDREQUEST;

    // Drop any service pointers from a previous initialization.
    m_clock.Reset();
    m_mixer.Reset();
    m_mediaEventSink.Reset();

    // Ask for the clock. Optional, because the EVR might not have a clock.
    objectCount = 1;

    lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                          MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
                          &objectCount
                          );

    // Ask for the mixer. (Required.)
    objectCount = 1;

    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                               MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
                               &objectCount
                               );

    if (FAILED(hr))
        return hr;

    // Make sure that we can work with this mixer.
    hr = configureMixer(m_mixer.Get());
    if (FAILED(hr))
        return hr;

    // Ask for the EVR's event-sink interface. (Required.)
    objectCount = 1;

    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                               MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
                               &objectCount
                               );

    if (SUCCEEDED(hr))
        m_renderState = RenderStopped;

    return hr;
}
625
627{
628 // Enter the shut-down state.
629 m_mutex.lock();
630
631 m_renderState = RenderShutdown;
632
633 m_mutex.unlock();
634
635 // Flush any samples that were scheduled.
636 flush();
637
638 // Clear the media type and release related resources.
639 setMediaType(NULL);
640
641 // Release all services that were acquired from InitServicePointers.
642 m_clock.Reset();
643 m_mixer.Reset();
644 m_mediaEventSink.Reset();
645
646 return S_OK;
647}
648
650{
651 return m_presentEngine->isValid() && m_canRenderToSurface;
652}
653
655{
656 HRESULT hr = S_OK;
657
658 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
659
660 hr = checkShutdown();
661 if (FAILED(hr))
662 return hr;
663
664 switch (message) {
665 // Flush all pending samples.
666 case MFVP_MESSAGE_FLUSH:
667 hr = flush();
668 break;
669
670 // Renegotiate the media type with the mixer.
671 case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
672 hr = renegotiateMediaType();
673 break;
674
675 // The mixer received a new input sample.
676 case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
677 hr = processInputNotify();
678 break;
679
680 // Streaming is about to start.
681 case MFVP_MESSAGE_BEGINSTREAMING:
682 hr = beginStreaming();
683 break;
684
685 // Streaming has ended. (The EVR has stopped.)
686 case MFVP_MESSAGE_ENDSTREAMING:
687 hr = endStreaming();
688 break;
689
690 // All input streams have ended.
691 case MFVP_MESSAGE_ENDOFSTREAM:
692 // Set the EOS flag.
693 m_endStreaming = true;
694 // Check if it's time to send the EC_COMPLETE event to the EVR.
695 hr = checkEndOfStream();
696 break;
697
698 // Frame-stepping is starting.
699 case MFVP_MESSAGE_STEP:
700 hr = prepareFrameStep(DWORD(param));
701 break;
702
703 // Cancels frame-stepping.
704 case MFVP_MESSAGE_CANCELSTEP:
705 hr = cancelFrameStep();
706 break;
707
708 default:
709 hr = E_INVALIDARG; // Unknown message. This case should never occur.
710 break;
711 }
712
713 return hr;
714}
715
717{
718 HRESULT hr = S_OK;
719
720 if (!mediaType)
721 return E_POINTER;
722
723 *mediaType = NULL;
724
725 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
726
727 hr = checkShutdown();
728 if (FAILED(hr))
729 return hr;
730
731 if (!m_mediaType)
732 return MF_E_NOT_INITIALIZED;
733
734 return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
735}
736
// IMFClockStateSink: the presentation clock started, or its position moved.
// A position change while the clock is already active is a seek, so pending
// samples are flushed. Starting from the stopped state instead resolves any
// pending frame-step work. Either way, we then try to pull new output
// samples from the mixer.
HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
{
    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    // We cannot start after shutdown.
    HRESULT hr = checkShutdown();
    if (FAILED(hr))
        return hr;

    // Check if the clock is already active (not stopped).
    if (isActive()) {
        m_renderState = RenderStarted;

        // If the clock position changes while the clock is active, it
        // is a seek request. We need to flush all pending samples.
        if (clockStartOffset != QMM_PRESENTATION_CURRENT_POSITION)
            flush();
    } else {
        m_renderState = RenderStarted;

        // The clock has started from the stopped state.

        // Possibly we are in the middle of frame-stepping OR have samples waiting
        // in the frame-step queue. Deal with these two cases first:
        hr = startFrameStep();
        if (FAILED(hr))
            return hr;
    }

    // Now try to get new output samples from the mixer.
    processOutputLoop();

    return hr;
}
771
773{
774 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
775
776 HRESULT hr = checkShutdown();
777 if (FAILED(hr))
778 return hr;
779
780 // The EVR calls OnClockRestart only while paused.
781
782 m_renderState = RenderStarted;
783
784 // Possibly we are in the middle of frame-stepping OR we have samples waiting
785 // in the frame-step queue. Deal with these two cases first:
786 hr = startFrameStep();
787 if (FAILED(hr))
788 return hr;
789
790 // Now resume the presentation loop.
791 processOutputLoop();
792
793 return hr;
794}
795
797{
798 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
799
800 HRESULT hr = checkShutdown();
801 if (FAILED(hr))
802 return hr;
803
804 if (m_renderState != RenderStopped) {
805 m_renderState = RenderStopped;
806 flush();
807
808 // If we are in the middle of frame-stepping, cancel it now.
809 if (m_frameStep.state != FrameStepNone)
810 cancelFrameStep();
811 }
812
813 return S_OK;
814}
815
817{
818 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
819
820 // We cannot pause the clock after shutdown.
821 HRESULT hr = checkShutdown();
822
823 if (SUCCEEDED(hr))
824 m_renderState = RenderPaused;
825
826 return hr;
827}
828
830{
831 // Note:
832 // The presenter reports its maximum rate through the IMFRateSupport interface.
833 // Here, we assume that the EVR honors the maximum rate.
834
835 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
836
837 HRESULT hr = checkShutdown();
838 if (FAILED(hr))
839 return hr;
840
841 // If the rate is changing from zero (scrubbing) to non-zero, cancel the
842 // frame-step operation.
843 if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
844 cancelFrameStep();
845 m_frameStep.samples.clear();
846 }
847
848 m_playbackRate = rate;
849
850 // Tell the scheduler about the new rate.
851 m_scheduler.setClockRate(rate);
852
853 return S_OK;
854}
855
856HRESULT EVRCustomPresenter::GetSlowestRate(MFRATE_DIRECTION, BOOL, float *rate)
857{
858 if (!rate)
859 return E_POINTER;
860
861 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
862
863 HRESULT hr = checkShutdown();
864
865 if (SUCCEEDED(hr)) {
866 // There is no minimum playback rate, so the minimum is zero.
867 *rate = 0;
868 }
869
870 return S_OK;
871}
872
// IMFRateSupport: return the fastest supported playback rate in the given
// direction. The magnitude comes from getMaxRate(), which is given the
// `thin` (frame-dropping) flag; for reverse playback the rate is negated.
// Fails after shutdown.
HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate)
{
    if (!rate)
        return E_POINTER;

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    float maxRate = 0.0f;

    HRESULT hr = checkShutdown();
    if (FAILED(hr))
        return hr;

    // Get the maximum *forward* rate.
    maxRate = getMaxRate(thin);

    // For reverse playback, it's the negative of maxRate.
    if (direction == MFRATE_REVERSE)
        maxRate = -maxRate;

    *rate = maxRate;

    return S_OK;
}
897
// IMFRateSupport: check whether a playback rate is supported.
// Any rate whose magnitude does not exceed getMaxRate(thin) is supported
// (there is no minimum). On MF_E_UNSUPPORTED_RATE, the nearest supported
// rate (the sign-matched maximum) is still written to *nearestSupportedRate
// when the caller provided it.
HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate)
{
    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    float maxRate = 0.0f;
    float nearestRate = rate; // If we support rate, that is the nearest.

    HRESULT hr = checkShutdown();
    if (FAILED(hr))
        return hr;

    // Find the maximum forward rate.
    // Note: We have no minimum rate (that is, we support anything down to 0).
    maxRate = getMaxRate(thin);

    if (qFabs(rate) > maxRate) {
        // The (absolute) requested rate exceeds the maximum rate.
        hr = MF_E_UNSUPPORTED_RATE;

        // The nearest supported rate is maxRate.
        nearestRate = maxRate;
        if (rate < 0) {
            // Negative for reverse playback.
            nearestRate = -nearestRate;
        }
    }

    // Return the nearest supported rate.
    if (nearestSupportedRate)
        *nearestSupportedRate = nearestRate;

    return hr;
}
931
933{
934 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
935
936 m_canRenderToSurface = false;
937
938 // check if we can render to the surface (compatible formats)
939 if (m_videoSink) {
940 for (int f = 0; f < QVideoFrameFormat::NPixelFormats; ++f) {
941 // ### set a better preference order
943 if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
944 m_canRenderToSurface = true;
945 break;
946 }
947 }
948 }
949
950 // TODO: if media type already set, renegotiate?
951}
952
954{
955 m_mutex.lock();
956 m_videoSink = sink;
957 m_presentEngine->setSink(sink);
958 m_mutex.unlock();
959
961}
962
964{
965 m_mutex.lock();
966 m_cropRect = cropRect;
967 m_mutex.unlock();
968}
969
// Push the presenter's current settings onto the mixer.
// Currently this is only the source (zoom/clipping) rectangle.
HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
{
    // Set the zoom rectangle (ie, the source clipping rectangle).
    return setMixerSourceRect(mixer, m_sourceRect);
}
975
// Negotiate an output media type with the mixer.
// Walks the mixer's proposed output types until one works end-to-end: we
// support it, we can build an "optimal" variant of it, the mixer accepts
// that variant, and we can make it our current media type. Returns the last
// failure code if the mixer runs out of types (MF_E_NO_MORE_TYPES).
HRESULT EVRCustomPresenter::renegotiateMediaType()
{
    HRESULT hr = S_OK;
    bool foundMediaType = false;

    IMFMediaType *mixerType = NULL;
    IMFMediaType *optimalType = NULL;

    if (!m_mixer)
        return MF_E_INVALIDREQUEST;

    // Loop through all of the mixer's proposed output types.
    DWORD typeIndex = 0;
    while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
        // Release the previous iteration's types before fetching new ones.
        qt_evr_safe_release(&mixerType);
        qt_evr_safe_release(&optimalType);

        // Step 1. Get the next media type supported by mixer.
        hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
        if (FAILED(hr))
            break;

        // From now on, if anything in this loop fails, try the next type,
        // until we succeed or the mixer runs out of types.

        // Step 2. Check if we support this media type.
        if (SUCCEEDED(hr))
            hr = isMediaTypeSupported(mixerType);

        // Step 3. Adjust the mixer's type to match our requirements.
        if (SUCCEEDED(hr))
            hr = createOptimalVideoType(mixerType, &optimalType);

        // Step 4. Check if the mixer will accept this media type.
        if (SUCCEEDED(hr))
            hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);

        // Step 5. Try to set the media type on ourselves.
        if (SUCCEEDED(hr))
            hr = setMediaType(optimalType);

        // Step 6. Set output media type on mixer.
        if (SUCCEEDED(hr)) {
            hr = m_mixer->SetOutputType(0, optimalType, 0);

            // If something went wrong, clear the media type.
            if (FAILED(hr))
                setMediaType(NULL);
        }

        if (SUCCEEDED(hr))
            foundMediaType = true;
    }

    qt_evr_safe_release(&mixerType);
    qt_evr_safe_release(&optimalType);

    return hr;
}
1035
// Drop all pending video samples.
// Blocks until the scheduler thread has discarded its queue, clears the
// frame-step queue, and — when stopped with a sink attached — repaints
// with an empty (black) frame. Also clears the preroll flag.
HRESULT EVRCustomPresenter::flush()
{
    m_prerolled = false;

    // The scheduler might have samples that are waiting for
    // their presentation time. Tell the scheduler to flush.

    // This call blocks until the scheduler threads discards all scheduled samples.
    m_scheduler.flush();

    // Flush the frame-step queue.
    m_frameStep.samples.clear();

    if (m_renderState == RenderStopped && m_videoSink) {
        // Repaint with black.
        presentSample(nullptr);
    }

    return S_OK;
}
1056
1057HRESULT EVRCustomPresenter::processInputNotify()
1058{
1059 HRESULT hr = S_OK;
1060
1061 // Set the flag that says the mixer has a new sample.
1062 m_sampleNotify = true;
1063
1064 if (!m_mediaType) {
1065 // We don't have a valid media type yet.
1066 hr = MF_E_TRANSFORM_TYPE_NOT_SET;
1067 } else {
1068 // Try to process an output sample.
1069 processOutputLoop();
1070 }
1071 return hr;
1072}
1073
1074HRESULT EVRCustomPresenter::beginStreaming()
1075{
1076 HRESULT hr = S_OK;
1077
1078 // Start the scheduler thread.
1079 hr = m_scheduler.startScheduler(m_clock);
1080
1081 return hr;
1082}
1083
1084HRESULT EVRCustomPresenter::endStreaming()
1085{
1086 HRESULT hr = S_OK;
1087
1088 // Stop the scheduler thread.
1089 hr = m_scheduler.stopScheduler();
1090
1091 return hr;
1092}
1093
// Send EC_COMPLETE to the EVR once the stream has truly drained: the EVR
// must have signalled end-of-stream, the mixer must have no pending input,
// and no samples may still be scheduled. Resets the EOS flag and stops the
// surface afterwards.
HRESULT EVRCustomPresenter::checkEndOfStream()
{
    if (!m_endStreaming) {
        // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message.
        return S_OK;
    }

    if (m_sampleNotify) {
        // The mixer still has input.
        return S_OK;
    }

    if (m_scheduler.areSamplesScheduled()) {
        // Samples are still scheduled for rendering.
        return S_OK;
    }

    // Everything is complete. Now we can tell the EVR that we are done.
    notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
    m_endStreaming = false;

    stopSurface();
    return S_OK;
}
1118
// MFVP_MESSAGE_STEP handler: remember how many frames to step and enter the
// "waiting to start" state. If the clock is already running, stepping begins
// immediately; otherwise it begins when the clock starts.
HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
{
    HRESULT hr = S_OK;

    // Cache the step count.
    m_frameStep.steps += steps;

    // Set the frame-step state.
    m_frameStep.state = FrameStepWaitingStart;

    // If the clock is already running, we can start frame-stepping now.
    // Otherwise, we will start when the clock starts.
    if (m_renderState == RenderStarted)
        hr = startFrameStep();

    return hr;
}
1136
// Called when the clock starts (or restarts) to resolve pending frame-step
// work. In the "waiting to start" state, queued samples are delivered
// through the frame-step path; when not stepping at all, queued samples are
// delivered normally.
HRESULT EVRCustomPresenter::startFrameStep()
{
    if (m_frameStep.state == FrameStepWaitingStart) {
        // We have a frame-step request, and are waiting for the clock to start.
        // Set the state to "pending," which means we are waiting for samples.
        m_frameStep.state = FrameStepPending;

        // If the frame-step queue already has samples, process them now.
        while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
            const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();

            const HRESULT hr = deliverFrameStepSample(sample.Get());
            if (FAILED(hr))
                return hr;

            // We break from this loop when:
            // (a) the frame-step queue is empty, or
            // (b) the frame-step operation is complete.
        }
    } else if (m_frameStep.state == FrameStepNone) {
        // We are not frame stepping. Therefore, if the frame-step queue has samples,
        // we need to process them normally.
        while (!m_frameStep.samples.isEmpty()) {
            const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();

            const HRESULT hr = deliverSample(sample.Get());
            if (FAILED(hr))
                return hr;
        }
    }

    return S_OK;
}
1170
// Finish one frame-step: mark it complete and notify the EVR with
// EC_STEP_COMPLETE (FALSE = completed, not cancelled). While scrubbing
// (rate == 0), also report the stepped sample's time via EC_SCRUB_TIME,
// with the low DWORD in param1 and the high DWORD in param2.
HRESULT EVRCustomPresenter::completeFrameStep(const ComPtr<IMFSample> &sample)
{
    HRESULT hr = S_OK;
    MFTIME sampleTime = 0;
    MFTIME systemTime = 0;

    // Update our state.
    m_frameStep.state = FrameStepComplete;
    m_frameStep.sampleNoRef = 0;

    // Notify the EVR that the frame-step is complete.
    notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled)

    // If we are scrubbing (rate == 0), also send the "scrub time" event.
    if (isScrubbing()) {
        // Get the time stamp from the sample.
        hr = sample->GetSampleTime(&sampleTime);
        if (FAILED(hr)) {
            // No time stamp. Use the current presentation time.
            if (m_clock)
                m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);

            hr = S_OK; // (Not an error condition.)
        }

        notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
    }
    return hr;
}
1200
1201HRESULT EVRCustomPresenter::cancelFrameStep()
1202{
1203 FrameStepState oldState = m_frameStep.state;
1204
1205 m_frameStep.state = FrameStepNone;
1206 m_frameStep.steps = 0;
1207 m_frameStep.sampleNoRef = 0;
1208 // Don't clear the frame-step queue yet, because we might frame step again.
1209
1210 if (oldState > FrameStepNone && oldState < FrameStepComplete) {
1211 // We were in the middle of frame-stepping when it was cancelled.
1212 // Notify the EVR.
1213 notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled
1214 }
1215 return S_OK;
1216}
1217
1218HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
1219{
1220 HRESULT hr = S_OK;
1221
1222 RECT rcOutput;
1223 ZeroMemory(&rcOutput, sizeof(rcOutput));
1224
1225 MFVideoArea displayArea;
1226 ZeroMemory(&displayArea, sizeof(displayArea));
1227
1228 IMFMediaType *mtOptimal = NULL;
1229
1230 UINT64 size;
1231 int width;
1232 int height;
1233
1234 // Clone the proposed type.
1235
1236 hr = MFCreateMediaType(&mtOptimal);
1237 if (FAILED(hr))
1238 goto done;
1239
1240 hr = proposedType->CopyAllItems(mtOptimal);
1241 if (FAILED(hr))
1242 goto done;
1243
1244 // Modify the new type.
1245
1246 hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
1247 width = int(HI32(size));
1248 height = int(LO32(size));
1249
1250 if (m_cropRect.isValid()) {
1251 rcOutput.left = m_cropRect.x();
1252 rcOutput.top = m_cropRect.y();
1253 rcOutput.right = m_cropRect.x() + m_cropRect.width();
1254 rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
1255
1256 m_sourceRect.left = float(m_cropRect.x()) / width;
1257 m_sourceRect.top = float(m_cropRect.y()) / height;
1258 m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
1259 m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
1260
1261 if (m_mixer)
1262 configureMixer(m_mixer.Get());
1263 } else {
1264 rcOutput.left = 0;
1265 rcOutput.top = 0;
1266 rcOutput.right = width;
1267 rcOutput.bottom = height;
1268 }
1269
1270 // Set the geometric aperture, and disable pan/scan.
1271 displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
1272 rcOutput.bottom - rcOutput.top);
1273
1274 hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
1275 if (FAILED(hr))
1276 goto done;
1277
1278 hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1279 sizeof(displayArea));
1280 if (FAILED(hr))
1281 goto done;
1282
1283 // Set the pan/scan aperture and the minimum display aperture. We don't care
1284 // about them per se, but the mixer will reject the type if these exceed the
1285 // frame dimentions.
1286 hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1287 sizeof(displayArea));
1288 if (FAILED(hr))
1289 goto done;
1290
1291 hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1292 sizeof(displayArea));
1293 if (FAILED(hr))
1294 goto done;
1295
1296 // Return the pointer to the caller.
1297 *optimalType = mtOptimal;
1298 (*optimalType)->AddRef();
1299
1300done:
1301 qt_evr_safe_release(&mtOptimal);
1302 return hr;
1303
1304}
1305
1306HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
1307{
1308 // Note: mediaType can be NULL (to clear the type)
1309
1310 // Clearing the media type is allowed in any state (including shutdown).
1311 if (!mediaType) {
1312 stopSurface();
1313 m_mediaType.Reset();
1314 releaseResources();
1315 return S_OK;
1316 }
1317
1318 MFRatio fps = { 0, 0 };
1319 QList<ComPtr<IMFSample>> sampleQueue;
1320
1321 // Cannot set the media type after shutdown.
1322 HRESULT hr = checkShutdown();
1323 if (FAILED(hr))
1324 goto done;
1325
1326 // Check if the new type is actually different.
1327 // Note: This function safely handles NULL input parameters.
1328 if (qt_evr_areMediaTypesEqual(m_mediaType.Get(), mediaType))
1329 goto done; // Nothing more to do.
1330
1331 // We're really changing the type. First get rid of the old type.
1332 m_mediaType.Reset();
1333 releaseResources();
1334
1335 // Initialize the presenter engine with the new media type.
1336 // The presenter engine allocates the samples.
1337
1338 hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
1339 if (FAILED(hr))
1340 goto done;
1341
1342 // Mark each sample with our token counter. If this batch of samples becomes
1343 // invalid, we increment the counter, so that we know they should be discarded.
1344 for (auto sample : std::as_const(sampleQueue)) {
1345 hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
1346 if (FAILED(hr))
1347 goto done;
1348 }
1349
1350 // Add the samples to the sample pool.
1351 hr = m_samplePool.initialize(std::move(sampleQueue));
1352 if (FAILED(hr))
1353 goto done;
1354
1355 // Set the frame rate on the scheduler.
1356 if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
1357 m_scheduler.setFrameRate(fps);
1358 } else {
1359 // NOTE: The mixer's proposed type might not have a frame rate, in which case
1360 // we'll use an arbitrary default. (Although it's unlikely the video source
1361 // does not have a frame rate.)
1362 m_scheduler.setFrameRate(g_DefaultFrameRate);
1363 }
1364
1365 // Store the media type.
1366 m_mediaType = mediaType;
1367 m_mediaType->AddRef();
1368
1369 startSurface();
1370
1371done:
1372 if (FAILED(hr))
1373 releaseResources();
1374 return hr;
1375}
1376
// Queries whether the presenter can accept a media type proposed by the
// mixer: it must be an uncompressed, progressive video format that the D3D
// present engine can create surfaces for, and any apertures declared in the
// type must fit inside the frame.
//
// Returns S_OK if the type is acceptable, MF_E_INVALIDMEDIATYPE when it is
// rejected, or another failing HRESULT on query errors.
HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
{
    D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
    BOOL compressed = FALSE;
    MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
    MFVideoArea videoCropArea;
    UINT32 width = 0, height = 0;

    // Validate the format.
    HRESULT hr = qt_evr_getFourCC(proposed, reinterpret_cast<DWORD*>(&d3dFormat));
    if (FAILED(hr))
        return hr;

    // NOTE(review): the declaration of 'pixelFormat' (presumably mapping
    // d3dFormat to a QVideoFrameFormat::PixelFormat) is elided in this
    // excerpt of the file.
    if (pixelFormat == QVideoFrameFormat::Format_Invalid)
        return MF_E_INVALIDMEDIATYPE;

    // Reject compressed media types.
    hr = proposed->IsCompressedFormat(&compressed);
    if (FAILED(hr))
        return hr;

    if (compressed)
        return MF_E_INVALIDMEDIATYPE;

    // The D3DPresentEngine checks whether surfaces can be created using this format
    hr = m_presentEngine->checkFormat(d3dFormat);
    if (FAILED(hr))
        return hr;

    // Reject interlaced formats.
    hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, reinterpret_cast<UINT32*>(&interlaceMode));
    if (FAILED(hr))
        return hr;

    if (interlaceMode != MFVideoInterlace_Progressive)
        return MF_E_INVALIDMEDIATYPE;

    hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
    if (FAILED(hr))
        return hr;

    // Validate the various apertures (cropping regions) against the frame size.
    // Any of these apertures may be unspecified in the media type, in which case
    // we ignore it. We just want to reject invalid apertures.

    if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE,
                                    reinterpret_cast<UINT8*>(&videoCropArea),
                                    sizeof(videoCropArea), nullptr))) {
        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
    }
    if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE,
                                    reinterpret_cast<UINT8*>(&videoCropArea),
                                    sizeof(videoCropArea), nullptr))) {
        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
    }
    if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
                                    reinterpret_cast<UINT8*>(&videoCropArea),
                                    sizeof(videoCropArea), nullptr))) {
        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
    }
    return hr;
}
1440
1441void EVRCustomPresenter::processOutputLoop()
1442{
1443 HRESULT hr = S_OK;
1444
1445 // Process as many samples as possible.
1446 while (hr == S_OK) {
1447 // If the mixer doesn't have a new input sample, break from the loop.
1448 if (!m_sampleNotify) {
1449 hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
1450 break;
1451 }
1452
1453 // Try to process a sample.
1454 hr = processOutput();
1455
1456 // NOTE: ProcessOutput can return S_FALSE to indicate it did not
1457 // process a sample. If so, break out of the loop.
1458 }
1459
1460 if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
1461 // The mixer has run out of input data. Check for end-of-stream.
1462 checkEndOfStream();
1463 }
1464}
1465
// Pulls one composited frame from the mixer into a free sample from the
// pool and hands it to the scheduler (or to the frame-step logic).
//
// Returns:
//   S_OK    - a sample was produced and scheduled.
//   S_FALSE - nothing processed (no free sample, or already prerolled while
//             the clock is stopped); try again later.
//   MF_E_TRANSFORM_NEED_MORE_INPUT - the mixer must receive more input.
//   other failing HRESULTs - mixer / renegotiation errors.
HRESULT EVRCustomPresenter::processOutput()
{
    // If the clock is not running, we present the first sample,
    // and then don't present any more until the clock starts.
    if ((m_renderState != RenderStarted) && m_prerolled)
        return S_FALSE;

    // Make sure we have a pointer to the mixer.
    if (!m_mixer)
        return MF_E_INVALIDREQUEST;

    // Try to get a free sample from the video sample pool.
    ComPtr<IMFSample> sample = m_samplePool.takeSample();
    if (!sample)
        return S_FALSE; // No free samples. Try again when a sample is released.

    // From now on, we have a valid video sample pointer, where the mixer will
    // write the video data.

    LONGLONG mixerStartTime = 0, mixerEndTime = 0;
    MFTIME systemTime = 0;

    if (m_clock) {
        // Latency: Record the starting time for ProcessOutput.
        m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
    }

    // Now we are ready to get an output sample from the mixer.
    DWORD status = 0;
    MFT_OUTPUT_DATA_BUFFER dataBuffer = {};
    dataBuffer.pSample = sample.Get();
    HRESULT hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);
    // Important: Release any events returned from the ProcessOutput method.
    qt_evr_safe_release(&dataBuffer.pEvents);

    if (FAILED(hr)) {
        // Return the sample to the pool.
        m_samplePool.returnSample(sample);

        // Handle some known error codes from ProcessOutput.
        if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
            // The mixer's format is not set. Negotiate a new format.
            hr = renegotiateMediaType();
        } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
            // There was a dynamic media type change. Clear our media type.
            setMediaType(NULL);
        } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
            // The mixer needs more input.
            // We have to wait for the mixer to get more input.
            m_sampleNotify = false;
        }

        return hr;
    }

    // We got an output sample from the mixer.
    if (m_clock) {
        // Latency: Record the ending time for the ProcessOutput operation,
        // and notify the EVR of the latency.

        m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);

        LONGLONG latencyTime = mixerEndTime - mixerStartTime;
        notifyEvent(EC_PROCESSING_LATENCY, reinterpret_cast<LONG_PTR>(&latencyTime), 0);
    }

    // Set up notification for when the sample is released, so it can be
    // returned to the pool (see onSampleFree).
    hr = trackSample(sample);
    if (FAILED(hr))
        return hr;

    // Schedule the sample.
    if (m_frameStep.state == FrameStepNone)
        hr = deliverSample(sample);
    else // We are frame-stepping
        hr = deliverFrameStepSample(sample);

    if (FAILED(hr))
        return hr;

    m_prerolled = true; // We have presented at least one sample now.
    return S_OK;
}
1549
1550HRESULT EVRCustomPresenter::deliverSample(const ComPtr<IMFSample> &sample)
1551{
1552 // If we are not actively playing, OR we are scrubbing (rate = 0),
1553 // then we need to present the sample immediately. Otherwise,
1554 // schedule it normally.
1555
1556 bool presentNow = ((m_renderState != RenderStarted) || isScrubbing());
1557
1558 HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
1559
1560 if (FAILED(hr)) {
1561 // Notify the EVR that we have failed during streaming. The EVR will notify the
1562 // pipeline.
1563
1564 notifyEvent(EC_ERRORABORT, hr, 0);
1565 }
1566
1567 return hr;
1568}
1569
// Handles a mixer output sample while frame-stepping is active. Depending
// on the current step state the sample is discarded, queued for a later
// step, or delivered for presentation; when it is the stepped frame its
// IUnknown identity is cached (without AddRef) so onSampleFree can detect
// the completion of the step.
HRESULT EVRCustomPresenter::deliverFrameStepSample(const ComPtr<IMFSample> &sample)
{
    HRESULT hr = S_OK;
    IUnknown *unk = NULL;

    // For rate 0, discard any sample that ends earlier than the clock time.
    if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
        // Discard this sample.
    } else if (m_frameStep.state >= FrameStepScheduled) {
        // A frame was already submitted. Put this sample on the frame-step queue,
        // in case we are asked to step to the next frame. If frame-stepping is
        // cancelled, this sample will be processed normally.
        m_frameStep.samples.append(sample);
    } else {
        // We're ready to frame-step.

        // Decrement the number of steps.
        if (m_frameStep.steps > 0)
            m_frameStep.steps--;

        if (m_frameStep.steps > 0) {
            // This is not the last step. Discard this sample.
        } else if (m_frameStep.state == FrameStepWaitingStart) {
            // This is the right frame, but the clock hasn't started yet. Put the
            // sample on the frame-step queue. When the clock starts, the sample
            // will be processed.
            m_frameStep.samples.append(sample);
        } else {
            // This is the right frame *and* the clock has started. Deliver this sample.
            hr = deliverSample(sample);
            if (FAILED(hr))
                goto done;

            // Query for IUnknown so that we can identify the sample later.
            // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown.
            hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
            if (FAILED(hr))
                goto done;

            m_frameStep.sampleNoRef = reinterpret_cast<DWORD_PTR>(unk); // No add-ref.

            // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the
            // sample from invoking the OnSampleFree callback after the sample is presented.
            // We use this IUnknown pointer purely to identify the sample later; we never
            // attempt to dereference the pointer.

            m_frameStep.state = FrameStepScheduled;
        }
    }
done:
    // Release the reference taken by the QueryInterface above (if any).
    qt_evr_safe_release(&unk);
    return hr;
}
1623
1624HRESULT EVRCustomPresenter::trackSample(const ComPtr<IMFSample> &sample)
1625{
1626 IMFTrackedSample *tracked = NULL;
1627
1628 HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
1629
1630 if (SUCCEEDED(hr))
1631 hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
1632
1633 qt_evr_safe_release(&tracked);
1634 return hr;
1635}
1636
1637void EVRCustomPresenter::releaseResources()
1638{
1639 // Increment the token counter to indicate that all existing video samples
1640 // are "stale." As these samples get released, we'll dispose of them.
1641 //
1642 // Note: The token counter is required because the samples are shared
1643 // between more than one thread, and they are returned to the presenter
1644 // through an asynchronous callback (onSampleFree). Without the token, we
1645 // might accidentally re-use a stale sample after the ReleaseResources
1646 // method returns.
1647
1648 m_tokenCounter++;
1649
1650 flush();
1651
1652 m_samplePool.clear();
1653
1654 m_presentEngine->releaseResources();
1655}
1656
// Asynchronous callback invoked when a tracked video sample has been
// released after presentation (see trackSample). Completes a pending frame
// step if this was the stepped sample, then returns the sample to the pool
// (unless it has become stale) and resumes output processing.
HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
{
    IUnknown *object = NULL;
    IMFSample *sample = NULL;
    IUnknown *unk = NULL;
    UINT32 token;

    // Get the sample from the async result object.
    HRESULT hr = result->GetObject(&object);
    if (FAILED(hr))
        goto done;

    hr = object->QueryInterface(IID_PPV_ARGS(&sample));
    if (FAILED(hr))
        goto done;

    // If this sample was submitted for a frame-step, the frame step operation
    // is complete.

    if (m_frameStep.state == FrameStepScheduled) {
        // Query the sample for IUnknown and compare it to our cached value.
        hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
        if (FAILED(hr))
            goto done;

        if (m_frameStep.sampleNoRef == reinterpret_cast<DWORD_PTR>(unk)) {
            // Notify the EVR.
            hr = completeFrameStep(sample);
            if (FAILED(hr))
                goto done;
        }

        // Note: Although object is also an IUnknown pointer, it is not
        // guaranteed to be the exact pointer value returned through
        // QueryInterface. Therefore, the second QueryInterface call is
        // required.
    }

    // Serialize the token check and pool return; this callback runs
    // asynchronously relative to the streaming thread (presumed — confirm
    // against the other m_mutex uses in this file).
    m_mutex.lock();

    token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);

    if (token == m_tokenCounter) {
        // The sample is from the current batch (not stale — see
        // releaseResources). Return the sample to the sample pool.
        m_samplePool.returnSample(sample);
        // A free sample is available. Process more data if possible.
        processOutputLoop();
    }

    m_mutex.unlock();

done:
    if (FAILED(hr))
        notifyEvent(EC_ERRORABORT, hr, 0);
    qt_evr_safe_release(&object);
    qt_evr_safe_release(&sample);
    qt_evr_safe_release(&unk);
    return hr;
}
1716
1717float EVRCustomPresenter::getMaxRate(bool thin)
1718{
1719 // Non-thinned:
1720 // If we have a valid frame rate and a monitor refresh rate, the maximum
1721 // playback rate is equal to the refresh rate. Otherwise, the maximum rate
1722 // is unbounded (FLT_MAX).
1723
1724 // Thinned: The maximum rate is unbounded.
1725
1726 float maxRate = FLT_MAX;
1727 MFRatio fps = { 0, 0 };
1728 UINT monitorRateHz = 0;
1729
1730 if (!thin && m_mediaType) {
1731 qt_evr_getFrameRate(m_mediaType.Get(), &fps);
1732 monitorRateHz = m_presentEngine->refreshRate();
1733
1734 if (fps.Denominator && fps.Numerator && monitorRateHz) {
1735 // Max Rate = Refresh Rate / Frame Rate
1736 maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
1737 }
1738 }
1739
1740 return maxRate;
1741}
1742
1744{
1745 switch (int(e->type())) {
1746 case StartSurface:
1747 startSurface();
1748 return true;
1749 case StopSurface:
1750 stopSurface();
1751 return true;
1752 case PresentSample:
1753 presentSample(static_cast<PresentSampleEvent *>(e)->sample());
1754 return true;
1755 default:
1756 break;
1757 }
1758 return QObject::event(e);
1759}
1760
1762{
1763 if (thread() != QThread::currentThread()) {
1765 return;
1766 }
1767}
1768
1770{
1771 if (thread() != QThread::currentThread()) {
1773 return;
1774 }
1775}
1776
// Converts a mixer output sample into a QVideoFrame and pushes it to the
// platform video sink, adjusting timestamps by the seek offset and applying
// the stream's rotation attribute.
void EVRCustomPresenter::presentSample(const ComPtr<IMFSample> &sample)
{
    if (thread() != QThread::currentThread()) {
        // NOTE(review): the statement that re-dispatches this call to the
        // object's thread (presumably posting a PresentSampleEvent) is
        // elided in this excerpt.
        return;
    }

    // Nothing to present without a sink or a valid negotiated surface format.
    if (!m_videoSink || !m_presentEngine->videoSurfaceFormat().isValid())
        return;

    QVideoFrame frame = m_presentEngine->makeVideoFrame(sample);

    // Since start/end times are related to a position when the clock is started,
    // to have times from the beginning, need to adjust it by adding seeked position.
    if (m_positionOffset) {
        if (frame.startTime())
            frame.setStartTime(frame.startTime() + m_positionOffset);
        if (frame.endTime())
            frame.setEndTime(frame.endTime() + m_positionOffset);
    }

    // Propagate the input stream's MF_MT_VIDEO_ROTATION attribute (if any)
    // to the frame; unknown values fall back to no rotation.
    ComPtr<IMFMediaType> inputStreamType;
    if (SUCCEEDED(m_mixer->GetInputCurrentType(0, inputStreamType.GetAddressOf()))) {
        auto rotation = static_cast<MFVideoRotationFormat>(MFGetAttributeUINT32(inputStreamType.Get(), MF_MT_VIDEO_ROTATION, 0));
        switch (rotation) {
        case MFVideoRotationFormat_0: frame.setRotation(QtVideo::Rotation::None); break;
        case MFVideoRotationFormat_90: frame.setRotation(QtVideo::Rotation::Clockwise90); break;
        case MFVideoRotationFormat_180: frame.setRotation(QtVideo::Rotation::Clockwise180); break;
        case MFVideoRotationFormat_270: frame.setRotation(QtVideo::Rotation::Clockwise270); break;
        default: frame.setRotation(QtVideo::Rotation::None);
        }
    }

    m_videoSink->platformVideoSink()->setVideoFrame(frame);
}
1812
1814{
1815 m_positionOffset = position * 1000;
1816}
1817
1818HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect)
1819{
1820 if (!mixer)
1821 return E_POINTER;
1822
1823 IMFAttributes *attributes = NULL;
1824
1825 HRESULT hr = mixer->GetAttributes(&attributes);
1826 if (SUCCEEDED(hr)) {
1827 hr = attributes->SetBlob(VIDEO_ZOOM_RECT, reinterpret_cast<const UINT8*>(&sourceRect),
1828 sizeof(sourceRect));
1829 attributes->Release();
1830 }
1831 return hr;
1832}
1833
1835{
1836 GUID majorType;
1837 if (FAILED(type->GetMajorType(&majorType)))
1839 if (majorType != MFMediaType_Video)
1841
1842 GUID subtype;
1843 if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
1845
1847}
1848
void setSink(QVideoSink *sink)
HRESULT createVideoSamples(IMFMediaType *format, QList< ComPtr< IMFSample > > &videoSampleQueue, QSize frameSize)
QVideoFrameFormat videoSurfaceFormat() const
HRESULT getService(REFGUID guidService, REFIID riid, void **ppv)
HRESULT checkFormat(D3DFORMAT format)
QVideoFrame makeVideoFrame(const ComPtr< IMFSample > &sample)
void positionChanged(qint64 position)
STDMETHODIMP GetDeviceID(IID *deviceID) override
STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override
STDMETHODIMP OnClockStop(MFTIME systemTime) override
STDMETHODIMP ReleaseServicePointers() override
STDMETHODIMP QueryInterface(REFIID riid, void **ppv) override
STDMETHODIMP OnClockRestart(MFTIME systemTime) override
STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup) override
STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject) override
void setCropRect(QRect cropRect)
STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate) override
STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate) override
STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType **mediaType) override
void setSink(QVideoSink *sink)
bool event(QEvent *) override
This virtual function receives events to an object and should return true if the event e was recogniz...
STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override
void presentSample(const ComPtr< IMFSample > &sample)
STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset) override
STDMETHODIMP OnClockPause(MFTIME systemTime) override
STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) override
EVRCustomPresenter(QVideoSink *sink=0)
ComPtr< IMFSample > sample() const
PresentSampleEvent(const ComPtr< IMFSample > &sample)
static void postEvent(QObject *receiver, QEvent *event, int priority=Qt::NormalEventPriority)
\inmodule QtCore
Definition qcoreevent.h:45
Type
This enum type defines the valid event types in Qt.
Definition qcoreevent.h:51
Type type() const
Returns the event type.
Definition qcoreevent.h:304
Definition qlist.h:75
bool isEmpty() const noexcept
Definition qlist.h:401
value_type takeFirst()
Definition qlist.h:566
qsizetype count() const noexcept
Definition qlist.h:398
void append(parameter_type t)
Definition qlist.h:458
void clear()
Definition qlist.h:434
\inmodule QtCore
Definition qmutex.h:313
void unlock() noexcept
Unlocks the mutex.
Definition qmutex.h:289
void lock() noexcept
Locks the mutex.
Definition qmutex.h:286
\inmodule QtCore
Definition qobject.h:103
virtual bool event(QEvent *event)
This virtual function receives events to an object and should return true if the event e was recogniz...
Definition qobject.cpp:1389
QThread * thread() const
Returns the thread in which the object lives.
Definition qobject.cpp:1598
void deleteLater()
\threadsafe
Definition qobject.cpp:2435
virtual void setVideoFrame(const QVideoFrame &frame)
void enqueue(const T &t)
Adds value t to the tail of the queue.
Definition qqueue.h:18
void swap(QQueue< T > &other) noexcept
Definition qqueue.h:17
\inmodule QtCore\reentrant
Definition qrect.h:30
constexpr int height() const noexcept
Returns the height of the rectangle.
Definition qrect.h:239
constexpr bool isValid() const noexcept
Returns true if the rectangle is valid, otherwise returns false.
Definition qrect.h:170
constexpr int x() const noexcept
Returns the x-coordinate of the rectangle's left edge.
Definition qrect.h:185
constexpr QSize size() const noexcept
Returns the size of the rectangle.
Definition qrect.h:242
constexpr int width() const noexcept
Returns the width of the rectangle.
Definition qrect.h:236
constexpr int y() const noexcept
Returns the y-coordinate of the rectangle's top edge.
Definition qrect.h:188
static QThread * currentThread()
Definition qthread.cpp:1039
Type get() const noexcept
bool isValid() const
Identifies if a video surface format has a valid pixel format and frame size.
PixelFormat
Enumerates video data types.
static constexpr int NPixelFormats
The QVideoFrame class represents a frame of video data.
Definition qvideoframe.h:27
The QVideoSink class represents a generic sink for video data.
Definition qvideosink.h:22
QPlatformVideoSink * platformVideoSink() const
ComPtr< IMFSample > takeSample()
void returnSample(const ComPtr< IMFSample > &sample)
HRESULT initialize(QList< ComPtr< IMFSample > > &&samples)
HRESULT stopScheduler()
Scheduler(EVRCustomPresenter *presenter)
HRESULT startScheduler(ComPtr< IMFClock > clock)
void setFrameRate(const MFRatio &fps)
void setClockRate(float rate)
HRESULT processSamplesInQueue(LONG *nextSleep)
HRESULT scheduleSample(const ComPtr< IMFSample > &sample, bool presentNow)
static DWORD WINAPI schedulerThreadProc(LPVOID parameter)
#define this
Definition dialogs.cpp:9
direction
static LONG MFTimeToMsec(const LONGLONG &time)
static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &nrcSource)
static const DWORD SCHEDULER_TIMEOUT
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
static const MFTIME ONE_SECOND
static const LONG ONE_MSEC
static QT_END_NAMESPACE const GUID MFSamplePresenter_SampleCounter
D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2)
QT_BEGIN_NAMESPACE HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC)
bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
HRESULT qt_evr_validateVideoArea(const MFVideoArea &area, UINT32 width, UINT32 height)
HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
MFVideoArea qt_evr_makeMFArea(float x, float y, DWORD width, DWORD height)
static QT_BEGIN_NAMESPACE void qt_evr_safe_release(T **unk)
Token token
Definition keywords.cpp:444
Combined button and popup list for selecting options.
Q_MULTIMEDIA_EXPORT QVideoFrameFormat::PixelFormat pixelFormatFromMediaSubtype(const GUID &subtype)
#define DWORD_PTR
#define Q_LOGGING_CATEGORY(name,...)
#define qCWarning(category,...)
#define qCDebug(category,...)
auto qFabs(T v)
Definition qmath.h:48
GLsizei samples
GLint GLsizei GLsizei height
GLenum GLuint GLintptr GLsizeiptr size
[1]
GLfloat GLfloat f
GLint GLsizei width
GLenum type
GLuint GLsizei const GLchar * message
GLenum const GLint * param
GLint GLsizei GLsizei GLenum format
GLuint GLenum * rate
GLsizei GLenum GLboolean sink
GLuint64EXT * result
[6]
static qreal position(const QQuickItem *item, QQuickAnchors::Anchor anchorLine)
#define Q_ASSERT(cond)
Definition qrandom.cpp:47
long long qint64
Definition qtypes.h:60
IUIViewSettingsInterop __RPC__in REFIID riid
struct tagMSG MSG
long HRESULT
#define QMM_PRESENTATION_CURRENT_POSITION
QByteArray compressed
QFrame frame
[0]
QSvgRenderer * renderer
[0]
Definition moc.h:23