WMF: enabled HW-accelerated video decoding for the QML video item.
author: Yoann Lopes <yoann.lopes@digia.com>
Mon, 14 Jan 2013 16:44:06 +0000 (17:44 +0100)
committer: The Qt Project <gerrit-noreply@qt-project.org>
Wed, 23 Jan 2013 21:34:07 +0000 (22:34 +0100)
It also applies to QGraphicsVideoItem when used on a GL viewport.
We now have a new video sink that is based on Microsoft's EVR sink, we just
replace the default Presenter with our own. Frames are rendered into D3D
surfaces using DXVA, then copied into a shared D3D/EGL surface and finally
bound to a GL texture to be used by the video surface.
The shared D3D/EGL surface is a feature provided by ANGLE and therefore Qt
must be compiled with ANGLE for this new video sink to be compiled and
used.

Change-Id: I0b7b9968eed5488f9ef1a2dcca5213bd0af232ab
Reviewed-by: Yoann Lopes <yoann.lopes@digia.com>
12 files changed:
src/imports/multimedia/qsgvideonode_texture.cpp
src/plugins/wmf/evrcustompresenter.cpp [new file with mode: 0644]
src/plugins/wmf/evrcustompresenter.h [new file with mode: 0644]
src/plugins/wmf/evrd3dpresentengine.cpp [new file with mode: 0644]
src/plugins/wmf/evrd3dpresentengine.h [new file with mode: 0644]
src/plugins/wmf/mfactivate.cpp [new file with mode: 0644]
src/plugins/wmf/mfactivate.h [new file with mode: 0644]
src/plugins/wmf/mfglobal.cpp [new file with mode: 0644]
src/plugins/wmf/mfglobal.h [new file with mode: 0644]
src/plugins/wmf/player/mfvideorenderercontrol.cpp
src/plugins/wmf/player/mfvideorenderercontrol.h
src/plugins/wmf/wmf.pro

index 80a42e5..e0d9737 100644 (file)
@@ -118,6 +118,7 @@ protected:
         "    gl_FragColor = texture2D(rgbTexture, qt_TexCoord) * opacity;"
         "}";
 
+#ifndef QT_OPENGL_ES_2_ANGLE
         static const char *colorsSwapShader =
         "uniform sampler2D rgbTexture;"
         "uniform lowp float opacity;"
@@ -137,6 +138,9 @@ protected:
             default:
                 return shader;
         }
+#else
+        return shader;
+#endif
     }
 
     virtual void initialize() {
diff --git a/src/plugins/wmf/evrcustompresenter.cpp b/src/plugins/wmf/evrcustompresenter.cpp
new file mode 100644 (file)
index 0000000..eb73e67
--- /dev/null
@@ -0,0 +1,2052 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia.  For licensing terms and
+** conditions see http://qt.digia.com/licensing.  For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights.  These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "evrcustompresenter.h"
+
+#include "mfglobal.h"
+#include "evrd3dpresentengine.h"
+
+#include <QtCore/qmutex.h>
+#include <QtCore/qvarlengtharray.h>
+#include <QtCore/qrect.h>
+#include <qabstractvideosurface.h>
+#include <qthread.h>
+#include <qcoreapplication.h>
+#include <QtCore/qdebug.h>
+#include <d3d9.h>
+#include <dshow.h>
+
+QT_USE_NAMESPACE
+
+// Fallback frame rate used when the media type does not provide one.
+const static MFRatio g_DefaultFrameRate = { 30, 1 };
+// Upper bound (msec) on how long Scheduler::flush() waits for the worker thread.
+static const DWORD SCHEDULER_TIMEOUT = 5000;
+// MFTIME is expressed in 100-nanosecond units: 10,000,000 units per second.
+static const MFTIME ONE_SECOND = 10000000;
+static const LONG   ONE_MSEC = 1000;
+
+// Function declarations.
+static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG& hnsSampleTime, const LONGLONG& hnsDuration);
+static HRESULT clearDesiredSampleTime(IMFSample *sample);
+static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
+static DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat);
+static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
+
+// Converts a 100-ns MFTIME duration to milliseconds (truncating).
+static inline LONG MFTimeToMsec(const LONGLONG& time)
+{
+    return (LONG)(time / (ONE_SECOND / ONE_MSEC));
+}
+
+
+// Scheduler: queues video samples and hands each one to the presenter
+// callback (m_CB) at its target presentation time, using a dedicated worker
+// thread (see schedulerThreadProc).
+Scheduler::Scheduler()
+    : m_CB(NULL)
+    , m_clock(NULL)
+    , m_threadID(0)
+    , m_schedulerThread(0)
+    , m_threadReadyEvent(0)
+    , m_flushEvent(0)
+    , m_playbackRate(1.0f)
+    , m_lastSampleTime(0)
+    , m_perFrameInterval(0)
+    , m_perFrame_1_4th(0)
+{
+}
+
+Scheduler::~Scheduler()
+{
+    qt_wmf_safeRelease(&m_clock);
+    // Drop the reference held on every sample still waiting in the queue.
+    for (int i = 0; i < m_scheduledSamples.size(); ++i)
+        m_scheduledSamples[i]->Release();
+    m_scheduledSamples.clear();
+}
+
+// Stores the per-frame interval (and a quarter of it) derived from the frame
+// rate; processSample() uses these to decide whether a sample is early/late.
+void Scheduler::setFrameRate(const MFRatio& fps)
+{
+    UINT64 AvgTimePerFrame = 0;
+
+    // Convert to a duration.
+    MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
+
+    m_perFrameInterval = (MFTIME)AvgTimePerFrame;
+
+    // Calculate 1/4th of this value, because we use it frequently.
+    m_perFrame_1_4th = m_perFrameInterval / 4;
+}
+
+HRESULT Scheduler::startScheduler(IMFClock *clock)
+{
+    if (m_schedulerThread)
+        return E_UNEXPECTED;
+
+    HRESULT hr = S_OK;
+    DWORD dwID = 0;
+
+    qt_wmf_copyComPointer(m_clock, clock);
+
+    // Set a high the timer resolution (ie, short timer period).
+    timeBeginPeriod(1);
+
+    // Create an event to wait for the thread to start.
+    m_threadReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    if (!m_threadReadyEvent) {
+        hr = HRESULT_FROM_WIN32(GetLastError());
+        goto done;
+    }
+
+    // Create an event to wait for flush commands to complete.
+    m_flushEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    if (!m_flushEvent) {
+        hr = HRESULT_FROM_WIN32(GetLastError());
+        goto done;
+    }
+
+    // Create the scheduler thread.
+    m_schedulerThread = CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID);
+    if (!m_schedulerThread) {
+        hr = HRESULT_FROM_WIN32(GetLastError());
+        goto done;
+    }
+
+    HANDLE hObjects[] = { m_threadReadyEvent, m_schedulerThread };
+    DWORD dwWait = 0;
+
+    // Wait for the thread to signal the "thread ready" event.
+    dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE);  // Wait for EITHER of these handles.
+    if (WAIT_OBJECT_0 != dwWait) {
+        // The thread terminated early for some reason. This is an error condition.
+        CloseHandle(m_schedulerThread);
+        m_schedulerThread = NULL;
+
+        hr = E_UNEXPECTED;
+        goto done;
+    }
+
+    m_threadID = dwID;
+
+done:
+    // Regardless success/failure, we are done using the "thread ready" event.
+    if (m_threadReadyEvent) {
+        CloseHandle(m_threadReadyEvent);
+        m_threadReadyEvent = NULL;
+    }
+    return hr;
+}
+
+// Stops the worker thread, closes its handles and discards any samples
+// still queued. Safe to call when the scheduler was never started.
+HRESULT Scheduler::stopScheduler()
+{
+    if (!m_schedulerThread)
+        return S_OK;
+
+    // Ask the scheduler thread to exit.
+    PostThreadMessage(m_threadID, Terminate, 0, 0);
+
+    // Wait for the thread to exit.
+    WaitForSingleObject(m_schedulerThread, INFINITE);
+
+    // Close handles.
+    CloseHandle(m_schedulerThread);
+    m_schedulerThread = NULL;
+
+    CloseHandle(m_flushEvent);
+    m_flushEvent = NULL;
+
+    // Discard samples.
+    m_mutex.lock();
+    for (int i = 0; i < m_scheduledSamples.size(); ++i)
+        m_scheduledSamples[i]->Release();
+    m_scheduledSamples.clear();
+    m_mutex.unlock();
+
+    // Restore the timer resolution.
+    timeEndPeriod(1);
+
+    return S_OK;
+}
+
+// Asks the worker thread to drop all queued samples and waits (bounded by
+// SCHEDULER_TIMEOUT) until it has done so, or until the thread terminates.
+HRESULT Scheduler::flush()
+{
+    if (m_schedulerThread) {
+        // Ask the scheduler thread to flush.
+        PostThreadMessage(m_threadID, Flush, 0 , 0);
+
+        // Wait for the scheduler thread to signal the flush event,
+        // OR for the thread to terminate.
+        HANDLE objects[] = { m_flushEvent, m_schedulerThread };
+
+        WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
+    }
+
+    return S_OK;
+}
+
+// Submits a sample for presentation. If presentNow is true (or no clock is
+// set) the sample is handed straight to the presenter callback via a queued
+// invocation; otherwise it is enqueued for the worker thread to time.
+HRESULT Scheduler::scheduleSample(IMFSample *sample, bool presentNow)
+{
+    if (!m_CB)
+        return MF_E_NOT_INITIALIZED;
+
+    if (!m_schedulerThread)
+        return MF_E_NOT_INITIALIZED;
+
+    HRESULT hr = S_OK;
+    DWORD dwExitCode = 0;
+
+    // Fail if the worker thread has already exited.
+    GetExitCodeThread(m_schedulerThread, &dwExitCode);
+    if (dwExitCode != STILL_ACTIVE)
+        return E_FAIL;
+
+    if (presentNow || !m_clock) {
+        // Present the sample immediately. The extra AddRef is consumed by
+        // the receiver of the queued "presentSample" invocation.
+        sample->AddRef();
+        QMetaObject::invokeMethod(m_CB,
+                                  "presentSample",
+                                  Qt::QueuedConnection,
+                                  Q_ARG(void*, sample),
+                                  Q_ARG(qint64, 0));
+    } else {
+        // Queue the sample and ask the scheduler thread to wake up.
+        m_mutex.lock();
+        sample->AddRef();
+        m_scheduledSamples.enqueue(sample);
+        m_mutex.unlock();
+
+        // NOTE(review): hr is still S_OK at this point, so the check below is
+        // always true; the PostThreadMessage result itself is not checked.
+        if (SUCCEEDED(hr))
+            PostThreadMessage(m_threadID, Schedule, 0, 0);
+    }
+
+    return hr;
+}
+
+// Drains the sample queue, presenting samples that are due. On return,
+// *nextSleep holds how long the worker thread should sleep before the next
+// sample is due (INFINITE when the queue is empty or an error occurred).
+HRESULT Scheduler::processSamplesInQueue(LONG *nextSleep)
+{
+    HRESULT hr = S_OK;
+    LONG wait = 0;
+    IMFSample *sample = NULL;
+
+    // Process samples until the queue is empty or until the wait time > 0.
+    // NOTE(review): isEmpty() is read without holding m_mutex; only this
+    // thread removes items, but the unlocked read is technically a data
+    // race with the enqueuing threads -- confirm.
+    while (!m_scheduledSamples.isEmpty()) {
+        m_mutex.lock();
+        sample = m_scheduledSamples.dequeue();
+        m_mutex.unlock();
+
+        // Process the next sample in the queue. If the sample is not ready
+        // for presentation. the value returned in wait is > 0, which
+        // means the scheduler should sleep for that amount of time.
+
+        hr = processSample(sample, &wait);
+        qt_wmf_safeRelease(&sample);
+
+        if (FAILED(hr) || wait > 0)
+            break;
+    }
+
+    // If the wait time is zero, it means we stopped because the queue is
+    // empty (or an error occurred). Set the wait time to infinite; this will
+    // make the scheduler thread sleep until it gets another thread message.
+    if (wait == 0)
+        wait = INFINITE;
+
+    *nextSleep = wait;
+    return hr;
+}
+
+// Decides whether a single sample should be presented now or re-queued for
+// later. *pNextSleep receives the time in msec to sleep before the sample
+// becomes due (0 when the sample was handled).
+HRESULT Scheduler::processSample(IMFSample *sample, LONG *pNextSleep)
+{
+    HRESULT hr = S_OK;
+
+    LONGLONG hnsPresentationTime = 0;
+    LONGLONG hnsTimeNow = 0;
+    MFTIME   hnsSystemTime = 0;
+
+    bool presentNow = true;
+    LONG nextSleep = 0;
+
+    if (m_clock) {
+        // Get the sample's time stamp. It is valid for a sample to
+        // have no time stamp.
+        hr = sample->GetSampleTime(&hnsPresentationTime);
+
+        // Get the clock time. (But if the sample does not have a time stamp,
+        // we don't need the clock time.)
+        if (SUCCEEDED(hr))
+            hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
+
+        // NOTE(review): if GetSampleTime failed, hr stays failed and the
+        // sample is presented immediately while the caller treats the
+        // failure as fatal -- confirm this is intended for the
+        // "no time stamp" case described above.
+
+        // Calculate the time until the sample's presentation time.
+        // A negative value means the sample is late.
+        LONGLONG hnsDelta = hnsPresentationTime - hnsTimeNow;
+        if (m_playbackRate < 0) {
+            // For reverse playback, the clock runs backward. Therefore, the
+            // delta is reversed.
+            hnsDelta = - hnsDelta;
+        }
+
+        if (hnsDelta < - m_perFrame_1_4th) {
+            // This sample is late.
+            presentNow = true;
+        } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
+            // This sample is still too early. Go to sleep.
+            nextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));
+
+            // Adjust the sleep time for the clock rate. (The presentation clock runs
+            // at m_fRate, but sleeping uses the system clock.)
+            if (m_playbackRate != 0)
+                nextSleep = (LONG)(nextSleep / fabsf(m_playbackRate));
+
+            // Don't present yet.
+            presentNow = false;
+        }
+    }
+
+    if (presentNow) {
+        // Hand the sample to the presenter callback; the AddRef is consumed
+        // by the receiver of the queued invocation.
+        sample->AddRef();
+        QMetaObject::invokeMethod(m_CB,
+                                  "presentSample",
+                                  Qt::QueuedConnection,
+                                  Q_ARG(void*, sample),
+                                  Q_ARG(qint64, hnsPresentationTime));
+    } else {
+        // The sample is not ready yet. Return it to the queue.
+        m_mutex.lock();
+        sample->AddRef();
+        m_scheduledSamples.prepend(sample);
+        m_mutex.unlock();
+    }
+
+    *pNextSleep = nextSleep;
+
+    return hr;
+}
+
+// Raw Win32 thread entry point; trampolines into the member function.
+DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
+{
+    Scheduler* scheduler = reinterpret_cast<Scheduler*>(parameter);
+    if (!scheduler)
+        return -1;
+    return scheduler->schedulerThreadProcPrivate();
+}
+
+// Worker thread loop: sleeps until the next sample is due or a thread
+// message (Terminate / Flush / Schedule) arrives. Returns 0 on clean exit,
+// 1 when the loop stopped because of a failed HRESULT.
+DWORD Scheduler::schedulerThreadProcPrivate()
+{
+    HRESULT hr = S_OK;
+    MSG msg;
+    LONG wait = INFINITE;
+    bool exitThread = false;
+
+    // Force the system to create a message queue for this thread.
+    // (See MSDN documentation for PostThreadMessage.)
+    PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
+
+    // Signal to the scheduler that the thread is ready.
+    SetEvent(m_threadReadyEvent);
+
+    while (!exitThread) {
+        // Wait for a thread message OR until the wait time expires.
+        DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);
+
+        if (result == WAIT_TIMEOUT) {
+            // If we timed out, then process the samples in the queue
+            hr = processSamplesInQueue(&wait);
+            if (FAILED(hr))
+                exitThread = true;
+        }
+
+        while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
+            // NOTE(review): processSamples is re-initialized to true for
+            // every message, so the assignment that clears it below never
+            // carries over to the next iteration (dead write).
+            bool processSamples = true;
+
+            switch (msg.message) {
+            case Terminate:
+                exitThread = true;
+                break;
+            case Flush:
+                // Flushing: Clear the sample queue and set the event.
+                m_mutex.lock();
+                for (int i = 0; i < m_scheduledSamples.size(); ++i)
+                    m_scheduledSamples[i]->Release();
+                m_scheduledSamples.clear();
+                m_mutex.unlock();
+                wait = INFINITE;
+                SetEvent(m_flushEvent);
+                break;
+            case Schedule:
+                // Process as many samples as we can.
+                if (processSamples) {
+                    hr = processSamplesInQueue(&wait);
+                    if (FAILED(hr))
+                        exitThread = true;
+                    processSamples = (wait != INFINITE);
+                }
+                break;
+            }
+        }
+
+    }
+
+    return (SUCCEEDED(hr) ? 0 : 1);
+}
+
+
+// SamplePool: a fixed set of pre-allocated video samples handed out via
+// getSample() and given back via returnSample().
+SamplePool::SamplePool()
+    : m_initialized(false)
+    , m_pending(0)
+{
+}
+
+SamplePool::~SamplePool()
+{
+    clear();
+}
+
+// Hands out one sample from the pool. The reference the queue held on the
+// sample is transferred directly to the caller, who must Release it (or
+// give it back through returnSample).
+HRESULT SamplePool::getSample(IMFSample **sample)
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (!m_initialized)
+        return MF_E_NOT_INITIALIZED;
+
+    if (m_videoSampleQueue.isEmpty())
+        return MF_E_SAMPLEALLOCATOR_EMPTY;
+
+    // Get a sample from the allocated queue.
+
+    // It doesn't matter if we pull them from the head or tail of the list,
+    // but when we get it back, we want to re-insert it onto the opposite end.
+    // (see ReturnSample)
+
+    IMFSample *taken = m_videoSampleQueue.takeFirst();
+
+    m_pending++;
+
+    // Give the sample to the caller, transferring the queue's reference.
+    // (The previous AddRef()/Release() pair on the same pointer was a
+    // redundant no-op and has been removed.)
+    *sample = taken;
+
+    return S_OK;
+}
+
+// Returns a sample to the pool once the presenter is done with it.
+HRESULT SamplePool::returnSample(IMFSample *sample)
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (!m_initialized)
+        return MF_E_NOT_INITIALIZED;
+
+    // The pool takes its own reference on the returned sample.
+    m_videoSampleQueue.append(sample);
+    sample->AddRef();
+
+    m_pending--;
+
+    return S_OK;
+}
+
+// True while any sample handed out by getSample has not been returned yet.
+BOOL SamplePool::areSamplesPending()
+{
+    QMutexLocker locker(&m_mutex);
+
+    bool ret = false;
+
+    if (!m_initialized)
+        ret = false;
+    else
+        ret = (m_pending > 0);
+
+    return ret;
+}
+
+// Populates the pool with the given samples. Ownership is taken over: the
+// caller's references are released and the input list is emptied.
+HRESULT SamplePool::initialize(QList<IMFSample*> &samples)
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (m_initialized)
+        return MF_E_INVALIDREQUEST;
+
+    IMFSample *sample = NULL;
+
+    // Move these samples into our allocated queue.
+    for (int i = 0; i < samples.size(); ++i) {
+        sample = samples.at(i);
+        sample->AddRef();
+        m_videoSampleQueue.append(sample);
+    }
+
+    m_initialized = true;
+
+    // Release the caller's references; the pool now owns the samples.
+    for (int i = 0; i < samples.size(); ++i)
+        samples[i]->Release();
+    samples.clear();
+    return S_OK;
+}
+
+// Releases every pooled sample and resets the pool to its initial state.
+HRESULT SamplePool::clear()
+{
+    QMutexLocker locker(&m_mutex);
+
+    for (int i = 0; i < m_videoSampleQueue.size(); ++i)
+        m_videoSampleQueue[i]->Release();
+    m_videoSampleQueue.clear();
+    m_initialized = false;
+    m_pending = 0;
+
+    return S_OK;
+}
+
+
+// EVRCustomPresenter: replaces the EVR's default presenter so that decoded
+// frames are routed through the D3DPresentEngine to a QAbstractVideoSurface.
+EVRCustomPresenter::EVRCustomPresenter()
+    : QObject()
+    , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree)
+    , m_refCount(1)  // COM convention: objects start with one reference
+    , m_renderState(RenderShutdown)
+    , m_mutex(QMutex::Recursive)
+    , m_tokenCounter(0)
+    , m_sampleNotify(false)
+    , m_repaint(false)
+    , m_prerolled(false)
+    , m_endStreaming(false)
+    , m_playbackRate(1.0f)
+    , m_D3DPresentEngine(0)
+    , m_clock(0)
+    , m_mixer(0)
+    , m_mediaEventSink(0)
+    , m_mediaType(0)
+    , m_surface(0)
+{
+    // Initial source rectangle = (0,0,1,1)
+    m_sourceRect.top = 0;
+    m_sourceRect.left = 0;
+    m_sourceRect.bottom = 1;
+    m_sourceRect.right = 1;
+
+    m_D3DPresentEngine = new D3DPresentEngine;
+    m_scheduler.setCallback(m_D3DPresentEngine);
+}
+
+EVRCustomPresenter::~EVRCustomPresenter()
+{
+    qt_wmf_safeRelease(&m_clock);
+    qt_wmf_safeRelease(&m_mixer);
+    qt_wmf_safeRelease(&m_mediaEventSink);
+    qt_wmf_safeRelease(&m_mediaType);
+
+    // The engine is a QObject; defer deletion to its thread's event loop.
+    m_D3DPresentEngine->deleteLater();
+}
+
+// Standard COM QueryInterface over the interfaces this presenter implements.
+HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject)
+{
+    if (!ppvObject)
+        return E_POINTER;
+    if (riid == IID_IMFGetService) {
+        *ppvObject = static_cast<IMFGetService*>(this);
+    } else if (riid == IID_IMFTopologyServiceLookupClient) {
+        *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
+    } else if (riid == IID_IMFVideoDeviceID) {
+        *ppvObject = static_cast<IMFVideoDeviceID*>(this);
+    } else if (riid == IID_IMFVideoPresenter) {
+        *ppvObject = static_cast<IMFVideoPresenter*>(this);
+    } else if (riid == IID_IMFRateSupport) {
+        *ppvObject = static_cast<IMFRateSupport*>(this);
+    } else if (riid == IID_IUnknown) {
+        // Several bases derive from IUnknown; pick one unambiguously.
+        *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
+    } else if (riid == IID_IMFClockStateSink) {
+        *ppvObject = static_cast<IMFClockStateSink*>(this);
+    } else {
+        *ppvObject =  NULL;
+        return E_NOINTERFACE;
+    }
+    AddRef();
+    return S_OK;
+}
+
+ULONG EVRCustomPresenter::AddRef()
+{
+    return InterlockedIncrement(&m_refCount);
+}
+
+ULONG EVRCustomPresenter::Release()
+{
+    ULONG uCount = InterlockedDecrement(&m_refCount);
+    if (uCount == 0)
+        delete this;
+    return uCount;
+}
+
+// IMFGetService: resolves MR_VIDEO_RENDER_SERVICE interfaces, trying the
+// present engine first and this object second.
+HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject)
+{
+    HRESULT hr = S_OK;
+
+    if (!ppvObject)
+        return E_POINTER;
+
+    // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE.
+    if (guidService != MR_VIDEO_RENDER_SERVICE)
+        return MF_E_UNSUPPORTED_SERVICE;
+
+    // First try to get the service interface from the D3DPresentEngine object.
+    hr = m_D3DPresentEngine->getService(guidService, riid, ppvObject);
+    if (FAILED(hr))
+        // Next, check if this object supports the interface.
+        hr = QueryInterface(riid, ppvObject);
+
+    return hr;
+}
+
+// IMFVideoDeviceID: the device ID the mixer must match to work with us.
+HRESULT EVRCustomPresenter::GetDeviceID(IID* deviceID)
+{
+    if (!deviceID)
+        return E_POINTER;
+
+    *deviceID = IID_IDirect3DDevice9;
+
+    return S_OK;
+}
+
+// IMFTopologyServiceLookupClient: called by the EVR to hand us the clock,
+// mixer and media event sink. Rejected while playback is active.
+HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup)
+{
+    if (!lookup)
+        return E_POINTER;
+
+    HRESULT hr = S_OK;
+    DWORD objectCount = 0;
+
+    QMutexLocker locker(&m_mutex);
+
+    // Do not allow initializing when playing or paused.
+    if (isActive())
+        return MF_E_INVALIDREQUEST;
+
+    qt_wmf_safeRelease(&m_clock);
+    qt_wmf_safeRelease(&m_mixer);
+    qt_wmf_safeRelease(&m_mediaEventSink);
+
+    // Ask for the clock. Optional, because the EVR might not have a clock.
+    // The return value is deliberately ignored for that reason.
+    objectCount = 1;
+
+    lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+                          MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
+                          &objectCount
+                          );
+
+    // Ask for the mixer. (Required.)
+    objectCount = 1;
+
+    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+                               MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
+                               &objectCount
+                               );
+
+    if (FAILED(hr))
+        return hr;
+
+    // Make sure that we can work with this mixer.
+    hr = configureMixer(m_mixer);
+    if (FAILED(hr))
+        return hr;
+
+    // Ask for the EVR's event-sink interface. (Required.)
+    objectCount = 1;
+
+    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+                               MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
+                               &objectCount
+                               );
+
+    if (SUCCEEDED(hr))
+        m_renderState = RenderStopped;
+
+    return hr;
+}
+
+// IMFTopologyServiceLookupClient: releases everything obtained in
+// InitServicePointers and puts the presenter into the shutdown state.
+HRESULT EVRCustomPresenter::ReleaseServicePointers()
+{
+    // Enter the shut-down state.
+    m_mutex.lock();
+
+    m_renderState = RenderShutdown;
+
+    m_mutex.unlock();
+
+    // Flush any samples that were scheduled.
+    flush();
+
+    // Clear the media type and release related resources.
+    setMediaType(NULL);
+
+    // Release all services that were acquired from InitServicePointers.
+    qt_wmf_safeRelease(&m_clock);
+    qt_wmf_safeRelease(&m_mixer);
+    qt_wmf_safeRelease(&m_mediaEventSink);
+
+    return S_OK;
+}
+
+// IMFVideoPresenter: dispatches control messages from the EVR. All handlers
+// run under m_mutex and fail fast once the presenter is shut down.
+HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param)
+{
+    HRESULT hr = S_OK;
+
+    QMutexLocker locker(&m_mutex);
+
+    hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    switch (message) {
+    // Flush all pending samples.
+    case MFVP_MESSAGE_FLUSH:
+        hr = flush();
+        break;
+
+    // Renegotiate the media type with the mixer.
+    case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
+        hr = renegotiateMediaType();
+        break;
+
+    // The mixer received a new input sample.
+    case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
+        hr = processInputNotify();
+        break;
+
+    // Streaming is about to start.
+    case MFVP_MESSAGE_BEGINSTREAMING:
+        hr = beginStreaming();
+        break;
+
+    // Streaming has ended. (The EVR has stopped.)
+    case MFVP_MESSAGE_ENDSTREAMING:
+        hr = endStreaming();
+        break;
+
+    // All input streams have ended.
+    case MFVP_MESSAGE_ENDOFSTREAM:
+        // Set the EOS flag.
+        m_endStreaming = true;
+        // Check if it's time to send the EC_COMPLETE event to the EVR.
+        hr = checkEndOfStream();
+        break;
+
+    // Frame-stepping is starting.
+    case MFVP_MESSAGE_STEP:
+        hr = prepareFrameStep(DWORD(param));
+        break;
+
+    // Cancels frame-stepping.
+    case MFVP_MESSAGE_CANCELSTEP:
+        hr = cancelFrameStep();
+        break;
+
+    default:
+        hr = E_INVALIDARG; // Unknown message. This case should never occur.
+        break;
+    }
+
+    return hr;
+}
+
+// IMFVideoPresenter: returns the media type currently set on the presenter,
+// or MF_E_NOT_INITIALIZED when none has been negotiated yet.
+HRESULT EVRCustomPresenter::GetCurrentMediaType(IMFVideoMediaType **mediaType)
+{
+    HRESULT hr = S_OK;
+
+    if (!mediaType)
+        return E_POINTER;
+
+    *mediaType = NULL;
+
+    QMutexLocker locker(&m_mutex);
+
+    hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    if (!m_mediaType)
+        return MF_E_NOT_INITIALIZED;
+
+    return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
+}
+
+// IMFClockStateSink: the presentation clock started (or was repositioned).
+HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
+{
+    QMutexLocker locker(&m_mutex);
+
+    // We cannot start after shutdown.
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // Check if the clock is already active (not stopped).
+    if (isActive()) {
+        m_renderState = RenderStarted;
+
+        // If the clock position changes while the clock is active, it
+        // is a seek request. We need to flush all pending samples.
+        if (clockStartOffset != PRESENTATION_CURRENT_POSITION)
+            flush();
+    } else {
+        m_renderState = RenderStarted;
+
+        // The clock has started from the stopped state.
+
+        // Possibly we are in the middle of frame-stepping OR have samples waiting
+        // in the frame-step queue. Deal with these two cases first:
+        hr = startFrameStep();
+        if (FAILED(hr))
+            return hr;
+    }
+
+    // Start the video surface in the main thread
+    if (thread() == QThread::currentThread())
+        startSurface();
+    else
+        QMetaObject::invokeMethod(this, "startSurface", Qt::QueuedConnection);
+
+    // Now try to get new output samples from the mixer.
+    processOutputLoop();
+
+    return hr;
+}
+
+// IMFClockStateSink: the clock resumed from the paused state.
+HRESULT EVRCustomPresenter::OnClockRestart(MFTIME)
+{
+    QMutexLocker locker(&m_mutex);
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // The EVR calls OnClockRestart only while paused.
+
+    m_renderState = RenderStarted;
+
+    // Possibly we are in the middle of frame-stepping OR we have samples waiting
+    // in the frame-step queue. Deal with these two cases first:
+    hr = startFrameStep();
+    if (FAILED(hr))
+        return hr;
+
+    // Now resume the presentation loop.
+    processOutputLoop();
+
+    return hr;
+}
+
+// IMFClockStateSink: the clock stopped; flush and stop the Qt surface.
+HRESULT EVRCustomPresenter::OnClockStop(MFTIME)
+{
+    QMutexLocker locker(&m_mutex);
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    if (m_renderState != RenderStopped) {
+        m_renderState = RenderStopped;
+        flush();
+
+        // If we are in the middle of frame-stepping, cancel it now.
+        if (m_frameStep.state != FrameStepNone)
+            cancelFrameStep();
+    }
+
+    // Stop the video surface in the main thread
+    if (thread() == QThread::currentThread())
+        stopSurface();
+    else
+        QMetaObject::invokeMethod(this, "stopSurface", Qt::QueuedConnection);
+
+    return S_OK;
+}
+
+// IMFClockStateSink: the clock paused.
+HRESULT EVRCustomPresenter::OnClockPause(MFTIME)
+{
+    QMutexLocker locker(&m_mutex);
+
+    // We cannot pause the clock after shutdown.
+    HRESULT hr = checkShutdown();
+
+    if (SUCCEEDED(hr))
+        m_renderState = RenderPaused;
+
+    return hr;
+}
+
+// IMFClockStateSink: the playback rate changed (0 means scrubbing).
+HRESULT EVRCustomPresenter::OnClockSetRate(MFTIME, float rate)
+{
+    // Note:
+    // The presenter reports its maximum rate through the IMFRateSupport interface.
+    // Here, we assume that the EVR honors the maximum rate.
+
+    QMutexLocker locker(&m_mutex);
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // If the rate is changing from zero (scrubbing) to non-zero, cancel the
+    // frame-step operation.
+    if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
+        cancelFrameStep();
+        for (int i = 0; i < m_frameStep.samples.size(); ++i)
+            m_frameStep.samples[i]->Release();
+        m_frameStep.samples.clear();
+    }
+
+    m_playbackRate = rate;
+
+    // Tell the scheduler about the new rate.
+    m_scheduler.setClockRate(rate);
+
+    return S_OK;
+}
+
+// IMFRateSupport: reports the slowest supported playback rate (always 0).
+// On shutdown the error from checkShutdown() is propagated, matching
+// GetFastestRate and IsRateSupported.
+HRESULT EVRCustomPresenter::GetSlowestRate(MFRATE_DIRECTION, BOOL, float *rate)
+{
+    if (!rate)
+        return E_POINTER;
+
+    QMutexLocker locker(&m_mutex);
+
+    HRESULT hr = checkShutdown();
+
+    if (SUCCEEDED(hr)) {
+        // There is no minimum playback rate, so the minimum is zero.
+        *rate = 0;
+    }
+
+    // Was "return S_OK;", which silently masked the shutdown error.
+    return hr;
+}
+
+// IMFRateSupport: reports the fastest supported rate in the given direction
+// (negative for reverse playback).
+HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate)
+{
+    if (!rate)
+        return E_POINTER;
+
+    QMutexLocker locker(&m_mutex);
+
+    float maxRate = 0.0f;
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // Get the maximum *forward* rate.
+    maxRate = getMaxRate(thin);
+
+    // For reverse playback, it's the negative of maxRate.
+    if (direction == MFRATE_REVERSE)
+        maxRate = -maxRate;
+
+    *rate = maxRate;
+
+    return S_OK;
+}
+
+// IMFRateSupport: checks a requested rate; on rejection reports the nearest
+// supported rate through the optional out-parameter.
+HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate)
+{
+    QMutexLocker locker(&m_mutex);
+
+    float maxRate = 0.0f;
+    float nearestRate = rate;  // If we support rate, that is the nearest.
+
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        return hr;
+
+    // Find the maximum forward rate.
+    // Note: We have no minimum rate (that is, we support anything down to 0).
+    maxRate = getMaxRate(thin);
+
+    if (fabsf(rate) > maxRate) {
+        // The (absolute) requested rate exceeds the maximum rate.
+        hr = MF_E_UNSUPPORTED_RATE;
+
+        // The nearest supported rate is maxRate.
+        nearestRate = maxRate;
+        if (rate < 0) {
+            // Negative for reverse playback.
+            nearestRate = -nearestRate;
+        }
+    }
+
+    // Return the nearest supported rate.
+    if (nearestSupportedRate)
+        *nearestSupportedRate = nearestRate;
+
+    return hr;
+}
+
+// Rebuilds the cached list of four-CC codes corresponding to the pixel
+// formats the Qt surface accepts as GL texture handles. Cleared when there
+// is no surface.
+void EVRCustomPresenter::supportedFormatsChanged()
+{
+    QMutexLocker locker(&m_mutex);
+
+    m_supportedGLFormats.clear();
+    if (!m_surface)
+        return;
+
+    QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle);
+    for (int i = 0; i < formats.size(); ++i) {
+        DWORD fourCC = getFourCCFromPixelFormat(formats.at(i));
+        if (fourCC)
+            m_supportedGLFormats.append(fourCC);
+    }
+}
+
+// Sets the Qt video surface that will receive the frames (may be null) and
+// forwards it to the present engine.
+void EVRCustomPresenter::setSurface(QAbstractVideoSurface *surface)
+{
+    m_mutex.lock();
+
+    m_surface = surface;
+
+    if (m_D3DPresentEngine)
+        m_D3DPresentEngine->setSurface(surface);
+
+    m_mutex.unlock();
+
+    // Re-query the surface's formats; called after unlock because it takes
+    // the mutex itself.
+    supportedFormatsChanged();
+}
+
+// Applies our source (zoom) rectangle to the mixer.
+HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
+{
+    // Set the zoom rectangle (ie, the source clipping rectangle).
+    return setMixerSourceRect(mixer, m_sourceRect);
+}
+
+// Negotiates an output media type with the mixer: iterates the mixer's
+// proposed types and keeps the first one that both sides accept.
+HRESULT EVRCustomPresenter::renegotiateMediaType()
+{
+    HRESULT hr = S_OK;
+    bool foundMediaType = false;
+
+    IMFMediaType *mixerType = NULL;
+    IMFMediaType *optimalType = NULL;
+
+    if (!m_mixer)
+        return MF_E_INVALIDREQUEST;
+
+    // Loop through all of the mixer's proposed output types.
+    DWORD typeIndex = 0;
+    while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
+        qt_wmf_safeRelease(&mixerType);
+        qt_wmf_safeRelease(&optimalType);
+
+        // Step 1. Get the next media type supported by mixer.
+        hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
+        if (FAILED(hr))
+            break;
+
+        // From now on, if anything in this loop fails, try the next type,
+        // until we succeed or the mixer runs out of types.
+
+        // Step 2. Check if we support this media type.
+        if (SUCCEEDED(hr))
+            hr = isMediaTypeSupported(mixerType);
+
+        // Step 3. Adjust the mixer's type to match our requirements.
+        if (SUCCEEDED(hr))
+            hr = createOptimalVideoType(mixerType, &optimalType);
+
+        // Step 4. Check if the mixer will accept this media type.
+        if (SUCCEEDED(hr))
+            hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);
+
+        // Step 5. Try to set the media type on ourselves.
+        if (SUCCEEDED(hr))
+            hr = setMediaType(optimalType);
+
+        // Step 6. Set output media type on mixer.
+        if (SUCCEEDED(hr)) {
+            hr = m_mixer->SetOutputType(0, optimalType, 0);
+
+            // If something went wrong, clear the media type.
+            if (FAILED(hr))
+                setMediaType(NULL);
+        }
+
+        if (SUCCEEDED(hr))
+            foundMediaType = true;
+    }
+
+    qt_wmf_safeRelease(&mixerType);
+    qt_wmf_safeRelease(&optimalType);
+
+    return hr;
+}
+
// Discards all pending video samples: blocks until the scheduler thread has
// dropped its queue, then empties the frame-step queue. When stopped, also
// asks the present engine to repaint (a null sample clears to black).
HRESULT EVRCustomPresenter::flush()
{
    // After a flush we need to preroll (present one frame) again.
    m_prerolled = false;

    // The scheduler might have samples that are waiting for
    // their presentation time. Tell the scheduler to flush.

    // This call blocks until the scheduler threads discards all scheduled samples.
    m_scheduler.flush();

    // Flush the frame-step queue.
    for (int i = 0; i < m_frameStep.samples.size(); ++i)
        m_frameStep.samples[i]->Release();
    m_frameStep.samples.clear();

    if (m_renderState == RenderStopped) {
        // Repaint with black.
        // Queued so it runs on the present engine's own thread.
        QMetaObject::invokeMethod(m_D3DPresentEngine,
                                  "presentSample",
                                  Qt::QueuedConnection,
                                  Q_ARG(void*, 0),
                                  Q_ARG(qint64, 0));
    }

    return S_OK;
}
+
+HRESULT EVRCustomPresenter::processInputNotify()
+{
+    HRESULT hr = S_OK;
+
+    // Set the flag that says the mixer has a new sample.
+    m_sampleNotify = true;
+
+    if (!m_mediaType) {
+        // We don't have a valid media type yet.
+        hr = MF_E_TRANSFORM_TYPE_NOT_SET;
+    } else {
+        // Try to process an output sample.
+        processOutputLoop();
+    }
+    return hr;
+}
+
+HRESULT EVRCustomPresenter::beginStreaming()
+{
+    HRESULT hr = S_OK;
+
+    // Start the scheduler thread.
+    hr = m_scheduler.startScheduler(m_clock);
+
+    return hr;
+}
+
+HRESULT EVRCustomPresenter::endStreaming()
+{
+    HRESULT hr = S_OK;
+
+    // Stop the scheduler thread.
+    hr = m_scheduler.stopScheduler();
+
+    return hr;
+}
+
+HRESULT EVRCustomPresenter::checkEndOfStream()
+{
+    if (!m_endStreaming) {
+        // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message.
+        return S_OK;
+    }
+
+    if (m_sampleNotify) {
+        // The mixer still has input.
+        return S_OK;
+    }
+
+    if (m_samplePool.areSamplesPending()) {
+        // Samples are still scheduled for rendering.
+        return S_OK;
+    }
+
+    // Everything is complete. Now we can tell the EVR that we are done.
+    notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
+    m_endStreaming = false;
+    return S_OK;
+}
+
// Records a request to step forward by 'steps' frames. Frame-stepping
// actually begins immediately if the clock is running, otherwise it is
// deferred until the clock starts.
HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
{
    HRESULT hr = S_OK;

    // Cache the step count (accumulates across repeated requests).
    m_frameStep.steps += steps;

    // Set the frame-step state.
    m_frameStep.state = FrameStepWaitingStart;

    // If the clock is already running, we can start frame-stepping now.
    // Otherwise, we will start when the clock starts.
    if (m_renderState == RenderStarted)
        hr = startFrameStep();

    return hr;
}
+
// Begins (or resumes) processing of the frame-step queue, called when the
// clock starts. If a step is pending, queued samples are run through
// deliverFrameStepSample(); if stepping is off, they are delivered normally.
HRESULT EVRCustomPresenter::startFrameStep()
{
    HRESULT hr = S_OK;
    IMFSample *sample = NULL;

    if (m_frameStep.state == FrameStepWaitingStart) {
        // We have a frame-step request, and are waiting for the clock to start.
        // Set the state to "pending," which means we are waiting for samples.
        m_frameStep.state = FrameStepPending;

        // If the frame-step queue already has samples, process them now.
        while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
            // takeFirst() transfers the queue's reference to 'sample';
            // it is released below (or in the cleanup path on failure).
            sample = m_frameStep.samples.takeFirst();

            hr = deliverFrameStepSample(sample);
            if (FAILED(hr))
                goto done;

            qt_wmf_safeRelease(&sample);

            // We break from this loop when:
            //   (a) the frame-step queue is empty, or
            //   (b) the frame-step operation is complete.
        }
    } else if (m_frameStep.state == FrameStepNone) {
        // We are not frame stepping. Therefore, if the frame-step queue has samples,
        // we need to process them normally.
        while (!m_frameStep.samples.isEmpty()) {
            sample = m_frameStep.samples.takeFirst();

            hr = deliverSample(sample, false);
            if (FAILED(hr))
                goto done;

            qt_wmf_safeRelease(&sample);
        }
    }

done:
    // Releases the current sample if a delivery call failed mid-loop;
    // no-op when 'sample' was already released.
    qt_wmf_safeRelease(&sample);
    return hr;
}
+
// Finalizes a frame-step once the stepped sample has been presented:
// updates the state machine, notifies the EVR, and — when scrubbing —
// reports the presented sample's time via EC_SCRUB_TIME.
HRESULT EVRCustomPresenter::completeFrameStep(IMFSample *sample)
{
    HRESULT hr = S_OK;
    MFTIME sampleTime = 0;
    MFTIME systemTime = 0;

    // Update our state.
    m_frameStep.state = FrameStepComplete;
    m_frameStep.sampleNoRef = NULL;

    // Notify the EVR that the frame-step is complete.
    notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled)

    // If we are scrubbing (rate == 0), also send the "scrub time" event.
    if (isScrubbing()) {
        // Get the time stamp from the sample.
        hr = sample->GetSampleTime(&sampleTime);
        if (FAILED(hr)) {
            // No time stamp. Use the current presentation time.
            if (m_clock)
                m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);

            hr = S_OK; // (Not an error condition.)
        }

        // EC_SCRUB_TIME carries the 64-bit time split into two DWORDs
        // (low part in param1, high part in param2).
        notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
    }
    return hr;
}
+
+HRESULT EVRCustomPresenter::cancelFrameStep()
+{
+    FrameStepState oldState = m_frameStep.state;
+
+    m_frameStep.state = FrameStepNone;
+    m_frameStep.steps = 0;
+    m_frameStep.sampleNoRef = NULL;
+    // Don't clear the frame-step queue yet, because we might frame step again.
+
+    if (oldState > FrameStepNone && oldState < FrameStepComplete) {
+        // We were in the middle of frame-stepping when it was cancelled.
+        // Notify the EVR.
+        notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled
+    }
+    return S_OK;
+}
+
+HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
+{
+    HRESULT hr = S_OK;
+
+    RECT rcOutput;
+    ZeroMemory(&rcOutput, sizeof(rcOutput));
+
+    MFVideoArea displayArea;
+    ZeroMemory(&displayArea, sizeof(displayArea));
+
+    IMFMediaType *mtOptimal = NULL;
+
+    // Clone the proposed type.
+
+    hr = MFCreateMediaType(&mtOptimal);
+    if (FAILED(hr))
+        goto done;
+
+    hr = proposedType->CopyAllItems(mtOptimal);
+    if (FAILED(hr))
+        goto done;
+
+    // Modify the new type.
+
+    // Set the pixel aspect ratio (PAR) to 1:1 (see assumption #1, above)
+    hr = MFSetAttributeRatio(mtOptimal, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
+    if (FAILED(hr))
+        goto done;
+
+    UINT64 size;
+    hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
+    int width = int(HI32(size));
+    int height = int(LO32(size));
+    rcOutput.left = 0;
+    rcOutput.top = 0;
+    rcOutput.right = width;
+    rcOutput.bottom = height;
+
+    // Set the geometric aperture, and disable pan/scan.
+    displayArea = qt_wmf_makeMFArea(0, 0, rcOutput.right, rcOutput.bottom);
+
+    hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
+    if (FAILED(hr))
+        goto done;
+
+    hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&displayArea, sizeof(displayArea));
+    if (FAILED(hr))
+        goto done;
+
+    // Set the pan/scan aperture and the minimum display aperture. We don't care
+    // about them per se, but the mixer will reject the type if these exceed the
+    // frame dimentions.
+    hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&displayArea, sizeof(displayArea));
+    if (FAILED(hr))
+        goto done;
+
+    hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&displayArea, sizeof(displayArea));
+    if (FAILED(hr))
+        goto done;
+
+    // Return the pointer to the caller.
+    *optimalType = mtOptimal;
+    (*optimalType)->AddRef();
+
+done:
+    qt_wmf_safeRelease(&mtOptimal);
+    return hr;
+
+}
+
+HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
+{
+    // Note: mediaType can be NULL (to clear the type)
+
+    // Clearing the media type is allowed in any state (including shutdown).
+    if (!mediaType) {
+        qt_wmf_safeRelease(&m_mediaType);
+        releaseResources();
+        m_D3DPresentEngine->setSurfaceFormat(QVideoSurfaceFormat());
+        return S_OK;
+    }
+
+    MFRatio fps = { 0, 0 };
+    QList<IMFSample*> sampleQueue;
+
+    IMFSample *sample = NULL;
+
+    QVideoSurfaceFormat surfaceFormat;
+
+    // Cannot set the media type after shutdown.
+    HRESULT hr = checkShutdown();
+    if (FAILED(hr))
+        goto done;
+
+    // Check if the new type is actually different.
+    // Note: This function safely handles NULL input parameters.
+    if (qt_wmf_areMediaTypesEqual(m_mediaType, mediaType))
+        goto done; // Nothing more to do.
+
+    // We're really changing the type. First get rid of the old type.
+    qt_wmf_safeRelease(&m_mediaType);
+    releaseResources();
+
+    // Initialize the presenter engine with the new media type.
+    // The presenter engine allocates the samples.
+
+    hr = m_D3DPresentEngine->createVideoSamples(mediaType, sampleQueue);
+    if (FAILED(hr))
+        goto done;
+
+    // Mark each sample with our token counter. If this batch of samples becomes
+    // invalid, we increment the counter, so that we know they should be discarded.
+    for (int i = 0; i < sampleQueue.size(); ++i) {
+        sample = sampleQueue.at(i);
+
+        hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
+        if (FAILED(hr))
+            goto done;
+    }
+
+    // Add the samples to the sample pool.
+    hr = m_samplePool.initialize(sampleQueue);
+    if (FAILED(hr))
+        goto done;
+
+    // Set the frame rate on the scheduler.
+    if (SUCCEEDED(qt_wmf_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
+        m_scheduler.setFrameRate(fps);
+    } else {
+        // NOTE: The mixer's proposed type might not have a frame rate, in which case
+        // we'll use an arbitrary default. (Although it's unlikely the video source
+        // does not have a frame rate.)
+        m_scheduler.setFrameRate(g_DefaultFrameRate);
+    }
+
+    // Store the media type.
+    m_mediaType = mediaType;
+    m_mediaType->AddRef();
+
+    // Create the surface format
+    UINT64 size;
+    hr = m_mediaType->GetUINT64(MF_MT_FRAME_SIZE, &size);
+    int width = int(HI32(size));
+    int height = int(LO32(size));
+    surfaceFormat = QVideoSurfaceFormat(QSize(width, height),
+                                        pixelFormatFromMediaType(m_mediaType),
+                                        QAbstractVideoBuffer::GLTextureHandle);
+    m_D3DPresentEngine->setSurfaceFormat(surfaceFormat);
+
+done:
+    if (FAILED(hr))
+        releaseResources();
+    return hr;
+}
+
// Validates a media type proposed by the mixer. Accepts only uncompressed,
// progressive formats that the video surface supports as GL textures, that
// the present engine can use as a back-buffer format, and whose apertures
// (if present) fit inside the frame. Returns S_OK when acceptable.
HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
{
    D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
    BOOL compressed = FALSE;
    MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
    MFVideoArea videoCropArea;
    UINT32 width = 0, height = 0;

    // Validate the format.
    HRESULT hr = qt_wmf_getFourCC(proposed, (DWORD*)&d3dFormat);
    if (FAILED(hr))
        return hr;

    // Only accept pixel formats supported by the video surface
    if (!m_supportedGLFormats.contains((DWORD)d3dFormat))
        return MF_E_INVALIDMEDIATYPE;

    // Reject compressed media types.
    hr = proposed->IsCompressedFormat(&compressed);
    if (FAILED(hr))
        return hr;

    if (compressed)
        return MF_E_INVALIDMEDIATYPE;

    // The D3DPresentEngine checks whether the format can be used as
    // the back-buffer format for the swap chains.
    hr = m_D3DPresentEngine->checkFormat(d3dFormat);
    if (FAILED(hr))
        return hr;

    // Reject interlaced formats.
    hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, (UINT32*)&interlaceMode);
    if (FAILED(hr))
        return hr;

    if (interlaceMode != MFVideoInterlace_Progressive)
        return MF_E_INVALIDMEDIATYPE;

    hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
    if (FAILED(hr))
        return hr;

    // Validate the various apertures (cropping regions) against the frame size.
    // Any of these apertures may be unspecified in the media type, in which case
    // we ignore it. We just want to reject invalid apertures.

    if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL)))
        hr = qt_wmf_validateVideoArea(videoCropArea, width, height);

    if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL)))
        hr = qt_wmf_validateVideoArea(videoCropArea, width, height);

    if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL)))
        hr = qt_wmf_validateVideoArea(videoCropArea, width, height);

    return hr;
}
+
+void EVRCustomPresenter::processOutputLoop()
+{
+    HRESULT hr = S_OK;
+
+    // Process as many samples as possible.
+    while (hr == S_OK) {
+        // If the mixer doesn't have a new input sample, break from the loop.
+        if (!m_sampleNotify) {
+            hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
+            break;
+        }
+
+        // Try to process a sample.
+        hr = processOutput();
+
+        // NOTE: ProcessOutput can return S_FALSE to indicate it did not
+        // process a sample. If so, break out of the loop.
+    }
+
+    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+        // The mixer has run out of input data. Check for end-of-stream.
+        checkEndOfStream();
+    }
+}
+
// Attempts to obtain one output sample from the mixer and hand it to the
// scheduler (or the frame-step logic). Returns S_OK when a sample was
// processed, S_FALSE when nothing could be done right now, or an error.
HRESULT EVRCustomPresenter::processOutput()
{
    HRESULT hr = S_OK;
    DWORD status = 0;
    LONGLONG mixerStartTime = 0, mixerEndTime = 0;
    MFTIME systemTime = 0;
    BOOL repaint = m_repaint; // Temporarily store this state flag.

    MFT_OUTPUT_DATA_BUFFER dataBuffer;
    ZeroMemory(&dataBuffer, sizeof(dataBuffer));

    IMFSample *sample = NULL;

    // If the clock is not running, we present the first sample,
    // and then don't present any more until the clock starts.

    if ((m_renderState != RenderStarted) && !m_repaint && m_prerolled)
        return S_FALSE;

    // Make sure we have a pointer to the mixer.
    if (!m_mixer)
        return MF_E_INVALIDREQUEST;

    // Try to get a free sample from the video sample pool.
    hr = m_samplePool.getSample(&sample);
    if (hr == MF_E_SAMPLEALLOCATOR_EMPTY) {
        // No free samples. Try again when a sample is released
        // (see onSampleFree, which re-enters processOutputLoop).
        return S_FALSE;
    } else if (FAILED(hr)) {
        return hr;
    }

    // From now on, we have a valid video sample pointer, where the mixer will
    // write the video data.

    if (m_repaint) {
        // Repaint request. Ask the mixer for the most recent sample.
        setDesiredSampleTime(sample, m_scheduler.lastSampleTime(), m_scheduler.frameDuration());

        m_repaint = false; // OK to clear this flag now.
    } else {
        // Not a repaint request. Clear the desired sample time; the mixer will
        // give us the next frame in the stream.
        clearDesiredSampleTime(sample);

        if (m_clock) {
            // Latency: Record the starting time for ProcessOutput.
            m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
        }
    }

    // Now we are ready to get an output sample from the mixer.
    dataBuffer.dwStreamID = 0;
    dataBuffer.pSample = sample;
    dataBuffer.dwStatus = 0;

    hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);

    if (FAILED(hr)) {
        // Return the sample to the pool.
        HRESULT hr2 = m_samplePool.returnSample(sample);
        if (FAILED(hr2)) {
            hr = hr2;
            goto done;
        }
        // Handle some known error codes from ProcessOutput.
        if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
            // The mixer's format is not set. Negotiate a new format.
            hr = renegotiateMediaType();
        } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
            // There was a dynamic media type change. Clear our media type.
            setMediaType(NULL);
        } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
            // The mixer needs more input.
            // We have to wait for the mixer to get more input.
            m_sampleNotify = false;
        }
    } else {
        // We got an output sample from the mixer.

        if (m_clock && !repaint) {
            // Latency: Record the ending time for the ProcessOutput operation,
            // and notify the EVR of the latency.

            m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);

            LONGLONG latencyTime = mixerEndTime - mixerStartTime;
            notifyEvent(EC_PROCESSING_LATENCY, (LONG_PTR)&latencyTime, 0);
        }

        // Set up notification for when the sample is released.
        hr = trackSample(sample);
        if (FAILED(hr))
            goto done;

        // Schedule the sample.
        if ((m_frameStep.state == FrameStepNone) || repaint) {
            hr = deliverSample(sample, repaint);
            if (FAILED(hr))
                goto done;
        } else {
            // We are frame-stepping (and this is not a repaint request).
            hr = deliverFrameStepSample(sample);
            if (FAILED(hr))
                goto done;
        }

        m_prerolled = true; // We have presented at least one sample now.
    }

done:
    qt_wmf_safeRelease(&sample);

    // Important: Release any events returned from the ProcessOutput method.
    qt_wmf_safeRelease(&dataBuffer.pEvents);
    return hr;
}
+
+HRESULT EVRCustomPresenter::deliverSample(IMFSample *sample, bool repaint)
+{
+    // If we are not actively playing, OR we are scrubbing (rate = 0) OR this is a
+    // repaint request, then we need to present the sample immediately. Otherwise,
+    // schedule it normally.
+
+    bool presentNow = ((m_renderState != RenderStarted) ||  isScrubbing() || repaint);
+
+    HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
+
+    if (FAILED(hr)) {
+        // Notify the EVR that we have failed during streaming. The EVR will notify the
+        // pipeline.
+
+        notifyEvent(EC_ERRORABORT, hr, 0);
+    }
+
+    return hr;
+}
+
// Routes one sample through the frame-step state machine: it may be
// discarded (stale while scrubbing, or an intermediate step), queued for
// later, or delivered as the stepped frame (state -> FrameStepScheduled).
HRESULT EVRCustomPresenter::deliverFrameStepSample(IMFSample *sample)
{
    HRESULT hr = S_OK;
    IUnknown *unk = NULL;

    // For rate 0, discard any sample that ends earlier than the clock time.
    if (isScrubbing() && m_clock && qt_wmf_isSampleTimePassed(m_clock, sample)) {
        // Discard this sample.
    } else if (m_frameStep.state >= FrameStepScheduled) {
        // A frame was already submitted. Put this sample on the frame-step queue,
        // in case we are asked to step to the next frame. If frame-stepping is
        // cancelled, this sample will be processed normally.
        sample->AddRef();
        m_frameStep.samples.append(sample);
    } else {
        // We're ready to frame-step.

        // Decrement the number of steps.
        if (m_frameStep.steps > 0)
            m_frameStep.steps--;

        if (m_frameStep.steps > 0) {
            // This is not the last step. Discard this sample.
        } else if (m_frameStep.state == FrameStepWaitingStart) {
            // This is the right frame, but the clock hasn't started yet. Put the
            // sample on the frame-step queue. When the clock starts, the sample
            // will be processed.
            sample->AddRef();
            m_frameStep.samples.append(sample);
        } else {
            // This is the right frame *and* the clock has started. Deliver this sample.
            hr = deliverSample(sample, false);
            if (FAILED(hr))
                goto done;

            // Query for IUnknown so that we can identify the sample later.
            // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown.
            hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
            if (FAILED(hr))
                goto done;

            m_frameStep.sampleNoRef = (DWORD_PTR)unk; // No add-ref.

            // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the
            // sample from invoking the OnSampleFree callback after the sample is presented.
            // We use this IUnknown pointer purely to identify the sample later; we never
            // attempt to dereference the pointer.

            m_frameStep.state = FrameStepScheduled;
        }
    }
done:
    qt_wmf_safeRelease(&unk);
    return hr;
}
+
+HRESULT EVRCustomPresenter::trackSample(IMFSample *sample)
+{
+    IMFTrackedSample *tracked = NULL;
+
+    HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
+
+    if (SUCCEEDED(hr))
+        hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
+
+    qt_wmf_safeRelease(&tracked);
+    return hr;
+}
+
// Invalidates all outstanding video samples and drops the pool and the
// present engine's resources.
void EVRCustomPresenter::releaseResources()
{
    // Increment the token counter to indicate that all existing video samples
    // are "stale." As these samples get released, we'll dispose of them.
    //
    // Note: The token counter is required because the samples are shared
    // between more than one thread, and they are returned to the presenter
    // through an asynchronous callback (onSampleFree). Without the token, we
    // might accidentally re-use a stale sample after the ReleaseResources
    // method returns.

    m_tokenCounter++;

    // Drop scheduled and frame-step samples before clearing the pool.
    flush();

    m_samplePool.clear();

    m_D3DPresentEngine->releaseResources();
}
+
// Async callback invoked when a tracked sample has been released by its
// consumer. Completes a pending frame-step if this was the stepped sample,
// then returns the sample to the pool (unless it is stale) and resumes
// output processing.
HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
{
    IUnknown *object = NULL;
    IMFSample *sample = NULL;
    IUnknown *unk = NULL;

    // Get the sample from the async result object.
    HRESULT hr = result->GetObject(&object);
    if (FAILED(hr))
        goto done;

    hr = object->QueryInterface(IID_PPV_ARGS(&sample));
    if (FAILED(hr))
        goto done;

    // If this sample was submitted for a frame-step, the frame step operation
    // is complete.

    if (m_frameStep.state == FrameStepScheduled) {
        // Query the sample for IUnknown and compare it to our cached value.
        hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
        if (FAILED(hr))
            goto done;

        if (m_frameStep.sampleNoRef == (DWORD_PTR)unk) {
            // Notify the EVR.
            hr = completeFrameStep(sample);
            if (FAILED(hr))
                goto done;
        }

        // Note: Although object is also an IUnknown pointer, it is not
        // guaranteed to be the exact pointer value returned through
        // QueryInterface. Therefore, the second QueryInterface call is
        // required.
    }

    m_mutex.lock();

    // The counter attribute tells us whether this sample belongs to the
    // current batch; stale samples are simply dropped.
    UINT32 token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);

    if (token == m_tokenCounter) {
        // Return the sample to the sample pool.
        hr = m_samplePool.returnSample(sample);
        if (SUCCEEDED(hr)) {
            // A free sample is available. Process more data if possible.
            processOutputLoop();
        }
    }

    m_mutex.unlock();

done:
    if (FAILED(hr))
        notifyEvent(EC_ERRORABORT, hr, 0);
    qt_wmf_safeRelease(&object);
    qt_wmf_safeRelease(&sample);
    qt_wmf_safeRelease(&unk);
    return hr;
}
+
+void EVRCustomPresenter::startSurface()
+{
+    if (m_D3DPresentEngine)
+        m_D3DPresentEngine->start();
+}
+
+void EVRCustomPresenter::stopSurface()
+{
+    if (m_D3DPresentEngine)
+        m_D3DPresentEngine->stop();
+}
+
+float EVRCustomPresenter::getMaxRate(bool thin)
+{
+    // Non-thinned:
+    // If we have a valid frame rate and a monitor refresh rate, the maximum
+    // playback rate is equal to the refresh rate. Otherwise, the maximum rate
+    // is unbounded (FLT_MAX).
+
+    // Thinned: The maximum rate is unbounded.
+
+    float maxRate = FLT_MAX;
+    MFRatio fps = { 0, 0 };
+    UINT monitorRateHz = 0;
+
+    if (!thin && m_mediaType) {
+        qt_wmf_getFrameRate(m_mediaType, &fps);
+        monitorRateHz = m_D3DPresentEngine->refreshRate();
+
+        if (fps.Denominator && fps.Numerator && monitorRateHz) {
+            // Max Rate = Refresh Rate / Frame Rate
+            maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
+        }
+    }
+
+    return maxRate;
+}
+
+HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG &sampleTime, const LONGLONG &duration)
+{
+    if (!sample)
+        return E_POINTER;
+
+    HRESULT hr = S_OK;
+    IMFDesiredSample *desired = NULL;
+
+    hr = sample->QueryInterface(IID_PPV_ARGS(&desired));
+    if (SUCCEEDED(hr))
+        desired->SetDesiredSampleTimeAndDuration(sampleTime, duration);
+
+    qt_wmf_safeRelease(&desired);
+    return hr;
+}
+
// Removes the "desired sample time" marking from a sample while preserving
// the custom attributes we store on it.
HRESULT clearDesiredSampleTime(IMFSample *sample)
{
    if (!sample)
        return E_POINTER;

    HRESULT hr = S_OK;

    IMFDesiredSample *desired = NULL;
    IUnknown *unkSwapChain = NULL;

    // We store some custom attributes on the sample, so we need to cache them
    // and reset them.
    //
    // This works around the fact that IMFDesiredSample::Clear() removes all of the
    // attributes from the sample.

    UINT32 counter = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);

    // The swap-chain attribute may be absent; failure here is ignored and
    // unkSwapChain simply stays NULL.
    sample->GetUnknown(MFSamplePresenter_SampleSwapChain, IID_IUnknown, (void**)&unkSwapChain);

    hr = sample->QueryInterface(IID_PPV_ARGS(&desired));
    if (SUCCEEDED(hr)) {
        desired->Clear();

        // Restore the cached attributes that Clear() wiped.
        hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, counter);
        if (FAILED(hr))
            goto done;

        if (unkSwapChain) {
            hr = sample->SetUnknown(MFSamplePresenter_SampleSwapChain, unkSwapChain);
            if (FAILED(hr))
                goto done;
        }
    }

done:
    qt_wmf_safeRelease(&unkSwapChain);
    qt_wmf_safeRelease(&desired);
    return hr;
}
+
+HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect)
+{
+    if (!mixer)
+        return E_POINTER;
+
+    IMFAttributes *attributes = NULL;
+
+    HRESULT hr = mixer->GetAttributes(&attributes);
+    if (SUCCEEDED(hr)) {
+        hr = attributes->SetBlob(VIDEO_ZOOM_RECT, (const UINT8*)&sourceRect, sizeof(sourceRect));
+        attributes->Release();
+    }
+    return hr;
+}
+
+DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat)
+{
+    DWORD fourCC = 0;
+    switch (pixelFormat) {
+        case QVideoFrame::Format_ARGB32:
+        case QVideoFrame::Format_ARGB32_Premultiplied:
+            fourCC = MFVideoFormat_ARGB32.Data1;
+            break;
+        case QVideoFrame::Format_RGB32:
+            fourCC = MFVideoFormat_RGB32.Data1;
+            break;
+        case QVideoFrame::Format_RGB24:
+            fourCC = MFVideoFormat_RGB24.Data1;
+            break;
+        case QVideoFrame::Format_RGB565:
+            fourCC = MFVideoFormat_RGB565.Data1;
+            break;
+        case QVideoFrame::Format_RGB555:
+            fourCC = MFVideoFormat_RGB555.Data1;
+            break;
+        case QVideoFrame::Format_AYUV444:
+        case QVideoFrame::Format_AYUV444_Premultiplied:
+            fourCC = MFVideoFormat_AYUV.Data1;
+            break;
+        case QVideoFrame::Format_YUV420P:
+            fourCC = MFVideoFormat_I420.Data1;
+            break;
+        case QVideoFrame::Format_UYVY:
+            fourCC = MFVideoFormat_UYVY.Data1;
+            break;
+        case QVideoFrame::Format_YV12:
+            fourCC = MFVideoFormat_YV12.Data1;
+            break;
+        case QVideoFrame::Format_NV12:
+            fourCC = MFVideoFormat_NV12.Data1;
+            break;
+        default:
+            break;
+    }
+    return fourCC;
+}
+
+static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
+{
+    GUID majorType;
+    if (FAILED(type->GetMajorType(&majorType)))
+        return QVideoFrame::Format_Invalid;
+    if (majorType != MFMediaType_Video)
+        return QVideoFrame::Format_Invalid;
+
+    GUID subType;
+    if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subType)))
+        return QVideoFrame::Format_Invalid;
+
+    if (subType == MFVideoFormat_RGB32)
+        return QVideoFrame::Format_RGB32;
+
+    return QVideoFrame::Format_Invalid;
+}
+
+
// Starts with no presenter and no surface; the presenter is created lazily
// in ActivateObject().
EVRCustomPresenterActivate::EVRCustomPresenterActivate()
    : MFAbstractActivate()
    , m_presenter(0)
    , m_surface(0)
{ }
+
// IMFActivate::ActivateObject: creates the presenter on first use (handing
// it any surface set beforehand) and returns the requested interface.
HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv)
{
    if (!ppv)
        return E_INVALIDARG;
    QMutexLocker locker(&m_mutex);
    if (!m_presenter) {
        m_presenter = new EVRCustomPresenter;
        if (m_surface)
            m_presenter->setSurface(m_surface);
    }
    return m_presenter->QueryInterface(riid, ppv);
}
+
// IMFActivate::ShutdownObject.
HRESULT EVRCustomPresenterActivate::ShutdownObject()
{
    // The presenter does not implement IMFShutdown so
    // this function is the same as DetachObject()
    return DetachObject();
}
+
+HRESULT EVRCustomPresenterActivate::DetachObject()
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_presenter) {
+        m_presenter->Release();
+        m_presenter = 0;
+    }
+    return S_OK;
+}
+
+void EVRCustomPresenterActivate::setSurface(QAbstractVideoSurface *surface)
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_surface == surface)
+        return;
+
+    m_surface = surface;
+
+    if (m_presenter)
+        m_presenter->setSurface(surface);
+}
+
+void EVRCustomPresenterActivate::supportedFormatsChanged()
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (m_presenter)
+        m_presenter->supportedFormatsChanged();
+}
diff --git a/src/plugins/wmf/evrcustompresenter.h b/src/plugins/wmf/evrcustompresenter.h
new file mode 100644 (file)
index 0000000..519e9a7
--- /dev/null
@@ -0,0 +1,333 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia.  For licensing terms and
+** conditions see http://qt.digia.com/licensing.  For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights.  These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef EVRCUSTOMPRESENTER_H
+#define EVRCUSTOMPRESENTER_H
+
+#include <QObject>
+#include <qmutex.h>
+#include <qqueue.h>
+#include <evr.h>
+#include "mfactivate.h"
+
+QT_BEGIN_NAMESPACE
+
+class D3DPresentEngine;
+class QAbstractVideoSurface;
+
// Schedules presentation of video samples. Queued samples are processed
// by a dedicated worker thread (schedulerThreadProc) driven by the
// presentation clock and the current playback rate.
class Scheduler
{
public:
    // Messages posted to the scheduler thread's Win32 message queue.
    enum ScheduleEvent
    {
        Terminate =    WM_USER,      // Ask the thread to exit.
        Schedule =     WM_USER + 1,  // Process the sample queue.
        Flush =        WM_USER + 2   // Discard pending samples.
    };

    Scheduler();
    ~Scheduler();

    // Object notified about scheduled samples; not owned by the scheduler.
    void setCallback(QObject *cb) {
        m_CB = cb;
    }

    void setFrameRate(const MFRatio &fps);
    void setClockRate(float rate) { m_playbackRate = rate; }

    const LONGLONG &lastSampleTime() const { return m_lastSampleTime; }
    const LONGLONG &frameDuration() const { return m_perFrameInterval; }

    HRESULT startScheduler(IMFClock *clock);
    HRESULT stopScheduler();

    // presentNow bypasses the queue timing; see the .cpp for details.
    HRESULT scheduleSample(IMFSample *sample, bool presentNow);
    HRESULT processSamplesInQueue(LONG *nextSleep);
    HRESULT processSample(IMFSample *sample, LONG *nextSleep);
    HRESULT flush();

    // ThreadProc for the scheduler thread.
    static DWORD WINAPI schedulerThreadProc(LPVOID parameter);

private:
    DWORD schedulerThreadProcPrivate();

    QQueue<IMFSample*> m_scheduledSamples; // Samples waiting to be presented.

    IMFClock *m_clock; // Presentation clock. Can be NULL.
    QObject *m_CB; // Weak reference; do not delete.

    DWORD m_threadID;
    HANDLE m_schedulerThread;
    HANDLE m_threadReadyEvent;
    HANDLE m_flushEvent;

    float m_playbackRate;
    MFTIME m_perFrameInterval; // Duration of each frame.
    LONGLONG m_perFrame_1_4th; // 1/4th of the frame duration.
    MFTIME m_lastSampleTime; // Most recent sample time.

    QMutex m_mutex;
};
+
// Thread-safe pool of pre-allocated video samples. The presenter draws
// samples with getSample() and puts them back with returnSample() once
// they have been presented.
class SamplePool
{
public:
    SamplePool();
    ~SamplePool();

    // Fills the pool with the given samples; see the .cpp for ownership
    // semantics.
    HRESULT initialize(QList<IMFSample*> &samples);
    HRESULT clear();

    HRESULT getSample(IMFSample **sample);
    HRESULT returnSample(IMFSample *sample);
    BOOL areSamplesPending();

private:
    QMutex m_mutex;
    QList<IMFSample*> m_videoSampleQueue;
    bool m_initialized;   // Set once initialize() has run.
    DWORD m_pending;      // Samples currently handed out to the presenter.
};
+
// Custom presenter plugged into Microsoft's Enhanced Video Renderer in
// place of the default presenter. Instead of drawing to a window, it
// forwards decoded frames to a QAbstractVideoSurface (via the
// D3DPresentEngine). Implements the COM interfaces the EVR requires of
// a presenter.
class EVRCustomPresenter
        : public QObject
        , public IMFVideoDeviceID
        , public IMFVideoPresenter // Inherits IMFClockStateSink
        , public IMFRateSupport
        , public IMFGetService
        , public IMFTopologyServiceLookupClient
{
    Q_OBJECT

public:
    // Defines the state of the presenter.
    enum RenderState
    {
        RenderStarted = 1,
        RenderStopped,
        RenderPaused,
        RenderShutdown  // Initial state.
    };

    // Defines the presenter's state with respect to frame-stepping.
    enum FrameStepState
    {
        FrameStepNone,             // Not frame stepping.
        FrameStepWaitingStart,     // Frame stepping, but the clock is not started.
        FrameStepPending,          // Clock is started. Waiting for samples.
        FrameStepScheduled,        // Submitted a sample for rendering.
        FrameStepComplete          // Sample was rendered.
    };

    EVRCustomPresenter();
    ~EVRCustomPresenter();

    // IUnknown methods
    STDMETHODIMP QueryInterface(REFIID riid, void ** ppv);
    STDMETHODIMP_(ULONG) AddRef();
    STDMETHODIMP_(ULONG) Release();

    // IMFGetService methods
    STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject);

    // IMFVideoPresenter methods
    STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param);
    STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType** mediaType);

    // IMFClockStateSink methods
    STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset);
    STDMETHODIMP OnClockStop(MFTIME systemTime);
    STDMETHODIMP OnClockPause(MFTIME systemTime);
    STDMETHODIMP OnClockRestart(MFTIME systemTime);
    STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate);

    // IMFRateSupport methods
    STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate);
    STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate);
    STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate);

    // IMFVideoDeviceID methods
    STDMETHODIMP GetDeviceID(IID* deviceID);

    // IMFTopologyServiceLookupClient methods
    STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup);
    STDMETHODIMP ReleaseServicePointers();

    void supportedFormatsChanged();
    void setSurface(QAbstractVideoSurface *surface);

private Q_SLOTS:
    void startSurface();
    void stopSurface();

private:
    // Returns MF_E_SHUTDOWN once the presenter has been shut down.
    HRESULT checkShutdown() const
    {
        if (m_renderState == RenderShutdown)
            return MF_E_SHUTDOWN;
        else
            return S_OK;
    }

    // The "active" state is started or paused.
    inline bool isActive() const
    {
        return ((m_renderState == RenderStarted) || (m_renderState == RenderPaused));
    }

    // Scrubbing occurs when the frame rate is 0.
    inline bool isScrubbing() const { return m_playbackRate == 0.0f; }

    // Send an event to the EVR through its IMediaEventSink interface.
    void notifyEvent(long eventCode, LONG_PTR param1, LONG_PTR param2)
    {
        if (m_mediaEventSink)
            m_mediaEventSink->Notify(eventCode, param1, param2);
    }

    float getMaxRate(bool thin);

    // Mixer operations
    HRESULT configureMixer(IMFTransform *mixer);

    // Formats
    HRESULT createOptimalVideoType(IMFMediaType* proposed, IMFMediaType **optimal);
    HRESULT setMediaType(IMFMediaType *mediaType);
    HRESULT isMediaTypeSupported(IMFMediaType *mediaType);

    // Message handlers
    HRESULT flush();
    HRESULT renegotiateMediaType();
    HRESULT processInputNotify();
    HRESULT beginStreaming();
    HRESULT endStreaming();
    HRESULT checkEndOfStream();

    // Managing samples
    void processOutputLoop();
    HRESULT processOutput();
    HRESULT deliverSample(IMFSample *sample, bool repaint);
    HRESULT trackSample(IMFSample *sample);
    void releaseResources();

    // Frame-stepping
    HRESULT prepareFrameStep(DWORD steps);
    HRESULT startFrameStep();
    HRESULT deliverFrameStepSample(IMFSample *sample);
    HRESULT completeFrameStep(IMFSample *sample);
    HRESULT cancelFrameStep();

    // Callback when a video sample is released.
    HRESULT onSampleFree(IMFAsyncResult *result);
    AsyncCallback<EVRCustomPresenter>   m_sampleFreeCB;

    // Holds information related to frame-stepping.
    struct FrameStep
    {
        FrameStep()
            : state(FrameStepNone)
            , steps(0)
            , sampleNoRef(NULL)
        {
        }

        FrameStepState state;
        QList<IMFSample*> samples;
        DWORD steps;
        DWORD_PTR sampleNoRef;
    };

    long m_refCount; // COM reference count.

    RenderState m_renderState;
    FrameStep m_frameStep;

    QMutex m_mutex;

    // Samples and scheduling
    Scheduler m_scheduler; // Manages scheduling of samples.
    SamplePool m_samplePool; // Pool of allocated samples.
    DWORD m_tokenCounter; // Counter. Incremented whenever we create new samples.

    // Rendering state
    bool m_sampleNotify; // Did the mixer signal it has an input sample?
    bool m_repaint; // Do we need to repaint the last sample?
    bool m_prerolled; // Have we presented at least one sample?
    bool m_endStreaming; // Did we reach the end of the stream (EOS)?

    MFVideoNormalizedRect m_sourceRect;
    float m_playbackRate;

    D3DPresentEngine *m_D3DPresentEngine; // Rendering engine. (Never null if the constructor succeeds.)

    IMFClock *m_clock; // The EVR's clock.
    IMFTransform *m_mixer; // The EVR's mixer.
    IMediaEventSink *m_mediaEventSink; // The EVR's event-sink interface.
    IMFMediaType *m_mediaType; // Output media type

    QAbstractVideoSurface *m_surface;
    QList<DWORD> m_supportedGLFormats;
};
+
// IMFActivate implementation handed to the EVR: creates the custom
// presenter on demand in ActivateObject(). A surface set before
// activation is forwarded to the presenter once it exists.
class EVRCustomPresenterActivate : public MFAbstractActivate
{
public:
    EVRCustomPresenterActivate();
    ~EVRCustomPresenterActivate()
    { }

    STDMETHODIMP ActivateObject(REFIID riid, void **ppv);
    STDMETHODIMP ShutdownObject();
    STDMETHODIMP DetachObject();

    void setSurface(QAbstractVideoSurface *surface);
    void supportedFormatsChanged();

private:
    EVRCustomPresenter *m_presenter; // Created lazily; released in DetachObject().
    QAbstractVideoSurface *m_surface; // Not owned.
    QMutex m_mutex; // Guards m_presenter and m_surface across threads.
};
+
+QT_END_NAMESPACE
+
+#endif // EVRCUSTOMPRESENTER_H
diff --git a/src/plugins/wmf/evrd3dpresentengine.cpp b/src/plugins/wmf/evrd3dpresentengine.cpp
new file mode 100644 (file)
index 0000000..c67b5d4
--- /dev/null
@@ -0,0 +1,580 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia.  For licensing terms and
+** conditions see http://qt.digia.com/licensing.  For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights.  These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "evrd3dpresentengine.h"
+
+#include "mfglobal.h"
+
+#include <qtgui/qguiapplication.h>
+#include <qpa/qplatformnativeinterface.h>
+#include <qtgui/qopenglcontext.h>
+#include <qabstractvideobuffer.h>
+#include <QAbstractVideoSurface>
+#include <qvideoframe.h>
+#include <QDebug>
+#include <qopenglcontext.h>
+#include <qwindow.h>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <WinUser.h>
+#include <evr.h>
+
+QT_USE_NAMESPACE
+
+static const DWORD PRESENTER_BUFFER_COUNT = 3;
+
+class TextureVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+    TextureVideoBuffer(GLuint textureId)
+        : QAbstractVideoBuffer(GLTextureHandle)
+        , m_textureId(textureId)
+    {}
+
+    ~TextureVideoBuffer() {}
+
+    MapMode mapMode() const { return NotMapped; }
+    uchar *map(MapMode, int*, int*) { return 0; }
+    void unmap() {}
+
+    QVariant handle() const
+    {
+        return QVariant::fromValue<unsigned int>(m_textureId);
+    }
+
+private:
+    GLuint m_textureId;
+};
+
+
+D3DPresentEngine::D3DPresentEngine()
+    : QObject()
+    , m_mutex(QMutex::Recursive)
+    , m_deviceResetToken(0)
+    , m_D3D9(0)
+    , m_device(0)
+    , m_deviceManager(0)
+    , m_surface(0)
+    , m_glContext(0)
+    , m_offscreenSurface(0)
+    , m_eglDisplay(0)
+    , m_eglConfig(0)
+    , m_eglSurface(0)
+    , m_glTexture(0)
+    , m_texture(0)
+{
+    ZeroMemory(&m_displayMode, sizeof(m_displayMode));
+
+    HRESULT hr = initializeD3D();
+
+    if (SUCCEEDED(hr)) {
+       hr = createD3DDevice();
+       if (FAILED(hr))
+           qWarning("Failed to create D3D device");
+    } else {
+        qWarning("Failed to initialize D3D");
+    }
+}
+
+D3DPresentEngine::~D3DPresentEngine()
+{
+    qt_wmf_safeRelease(&m_texture);
+    qt_wmf_safeRelease(&m_device);
+    qt_wmf_safeRelease(&m_deviceManager);
+    qt_wmf_safeRelease(&m_D3D9);
+
+    if (m_eglSurface) {
+        eglReleaseTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER);
+        eglDestroySurface(m_eglDisplay, m_eglSurface);
+        m_eglSurface = NULL;
+    }
+    if (m_glTexture)
+        glDeleteTextures(1, &m_glTexture);
+
+    delete m_glContext;
+    delete m_offscreenSurface;
+}
+
+void D3DPresentEngine::start()
+{
+    QMutexLocker locker(&m_mutex);
+
+    if (!m_surfaceFormat.isValid())
+        return;
+
+    if (!m_texture)
+        createOffscreenTexture();
+
+    if (m_surface && !m_surface->isActive())
+        m_surface->start(m_surfaceFormat);
+}
+
+void D3DPresentEngine::stop()
+{
+    QMutexLocker locker(&m_mutex);
+    if (m_surface && m_surface->isActive())
+        m_surface->stop();
+}
+
+HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv)
+{
+    HRESULT hr = S_OK;
+
+    if (riid == __uuidof(IDirect3DDeviceManager9)) {
+        if (m_deviceManager == NULL) {
+            hr = MF_E_UNSUPPORTED_SERVICE;
+        } else {
+            *ppv = m_deviceManager;
+            m_deviceManager->AddRef();
+        }
+    } else {
+        hr = MF_E_UNSUPPORTED_SERVICE;
+    }
+
+    return hr;
+}
+
+HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
+{
+    HRESULT hr = S_OK;
+
+    UINT uAdapter = D3DADAPTER_DEFAULT;
+    D3DDEVTYPE type = D3DDEVTYPE_HAL;
+
+    D3DDISPLAYMODE mode;
+    D3DDEVICE_CREATION_PARAMETERS params;
+
+    // Our shared D3D/EGL surface only supports RGB32,
+    // reject all other formats
+    if (format != D3DFMT_X8R8G8B8)
+        return MF_E_INVALIDMEDIATYPE;
+
+    if (m_device) {
+        hr = m_device->GetCreationParameters(&params);
+        if (FAILED(hr))
+            return hr;
+
+        uAdapter = params.AdapterOrdinal;
+        type = params.DeviceType;
+    }
+
+    hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
+    if (FAILED(hr))
+        return hr;
+
+    return m_D3D9->CheckDeviceType(uAdapter, type, mode.Format, format, TRUE);
+}
+
+HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
+{
+    if (!format)
+        return MF_E_UNEXPECTED;
+
+    HRESULT hr = S_OK;
+    D3DPRESENT_PARAMETERS pp;
+
+    IDirect3DSwapChain9 *swapChain = NULL;
+    IMFSample *videoSample = NULL;
+
+    QMutexLocker locker(&m_mutex);
+
+    releaseResources();
+
+    // Get the swap chain parameters from the media type.
+    hr = getSwapChainPresentParameters(format, &pp);
+    if (FAILED(hr))
+        goto done;
+
+    // Create the video samples.
+    for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
+        // Create a new swap chain.
+        hr = m_device->CreateAdditionalSwapChain(&pp, &swapChain);
+        if (FAILED(hr))
+            goto done;
+
+        // Create the video sample from the swap chain.
+        hr = createD3DSample(swapChain, &videoSample);
+        if (FAILED(hr))
+            goto done;
+
+        // Add it to the list.
+        videoSample->AddRef();
+        videoSampleQueue.append(videoSample);
+
+        // Set the swap chain pointer as a custom attribute on the sample. This keeps
+        // a reference count on the swap chain, so that the swap chain is kept alive
+        // for the duration of the sample's lifetime.
+        hr = videoSample->SetUnknown(MFSamplePresenter_SampleSwapChain, swapChain);
+        if (FAILED(hr))
+            goto done;
+
+        qt_wmf_safeRelease(&videoSample);
+        qt_wmf_safeRelease(&swapChain);
+    }
+
+done:
+    if (FAILED(hr))
+        releaseResources();
+
+    qt_wmf_safeRelease(&swapChain);
+    qt_wmf_safeRelease(&videoSample);
+    return hr;
+}
+
// Hook called before (re)creating video samples and on failure paths.
// Intentionally empty: the swap chains are kept alive by the samples
// themselves (attached as an attribute in createVideoSamples()), so
// releasing the samples releases the D3D resources as well.
void D3DPresentEngine::releaseResources()
{
}
+
+void D3DPresentEngine::presentSample(void *opaque, qint64)
+{
+    HRESULT hr = S_OK;
+
+    IMFSample *sample = reinterpret_cast<IMFSample*>(opaque);
+    IMFMediaBuffer* buffer = NULL;
+    IDirect3DSurface9* surface = NULL;
+
+    if (sample) {
+        // Get the buffer from the sample.
+        hr = sample->GetBufferByIndex(0, &buffer);
+        if (FAILED(hr))
+            goto done;
+
+        // Get the surface from the buffer.
+        hr = MFGetService(buffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&surface));
+        if (FAILED(hr))
+            goto done;
+    }
+
+    if (surface && updateTexture(surface)) {
+        m_surface->present(QVideoFrame(new TextureVideoBuffer(m_glTexture),
+                                       m_surfaceFormat.frameSize(),
+                                       m_surfaceFormat.pixelFormat()));
+    }
+
+done:
+    qt_wmf_safeRelease(&surface);
+    qt_wmf_safeRelease(&buffer);
+    qt_wmf_safeRelease(&sample);
+}
+
// Sets the video surface that receives the decoded frames. Not owned.
void D3DPresentEngine::setSurface(QAbstractVideoSurface *surface)
{
    QMutexLocker locker(&m_mutex);
    m_surface = surface;
}
+
// Stores the format used to start the surface and to size the shared
// texture (see start() and createOffscreenTexture()).
void D3DPresentEngine::setSurfaceFormat(const QVideoSurfaceFormat &format)
{
    QMutexLocker locker(&m_mutex);
    m_surfaceFormat = format;
}
+
// Creates the shared D3D/EGL surface (an ANGLE feature): an EGL pbuffer
// whose D3D share handle is used to create a D3D texture, so that frames
// copied into the D3D texture become visible through the GL texture
// bound to the pbuffer. If no GL context is current on this thread, a
// private context sharing with the surface's context is created.
void D3DPresentEngine::createOffscreenTexture()
{
    // First, check if we have a context on this thread
    QOpenGLContext *currentContext = QOpenGLContext::currentContext();

    if (!currentContext) {
        //Create OpenGL context and set share context from surface
        QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
        if (!shareContext)
            return;

        m_offscreenSurface = new QWindow;
        m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface);
        //Needs geometry to be a valid surface, but size is not important
        m_offscreenSurface->setGeometry(-1, -1, 1, 1);
        m_offscreenSurface->create();

        m_glContext = new QOpenGLContext;
        m_glContext->setFormat(m_offscreenSurface->requestedFormat());
        m_glContext->setShareContext(shareContext);

        if (!m_glContext->create()) {
            delete m_glContext;
            delete m_offscreenSurface;
            m_glContext = 0;
            m_offscreenSurface = 0;
            return;
        }

        currentContext = m_glContext;
    }

    if (m_glContext)
        m_glContext->makeCurrent(m_offscreenSurface);

    // NOTE(review): both resources are cast to EGLDisplay* — the second
    // should presumably be an EGLConfig-typed cast. This only works
    // because ANGLE's EGL handles are void*; confirm and clean up the
    // member types together with the header.
    QPlatformNativeInterface *nativeInterface = QGuiApplication::platformNativeInterface();
    m_eglDisplay = static_cast<EGLDisplay*>(
                nativeInterface->nativeResourceForContext("eglDisplay", currentContext));
    m_eglConfig = static_cast<EGLDisplay*>(
                nativeInterface->nativeResourceForContext("eglConfig", currentContext));

    glGenTextures(1, &m_glTexture);


    int w = m_surfaceFormat.frameWidth();
    int h = m_surfaceFormat.frameHeight();

    // Pbuffer sized to the video frame, bindable as a GL_TEXTURE_2D.
    EGLint attribs[] = {
        EGL_WIDTH, w,
        EGL_HEIGHT, h,
        EGL_TEXTURE_FORMAT, EGL_TEXTURE_RGB,
        EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
        EGL_NONE
    };

    // NOTE(review): no error checking below — pbuffer creation, the ANGLE
    // extension lookup and CreateTexture() can all fail; confirm failure
    // behavior is acceptable (engine silently stays textureless).
    EGLSurface pbuffer = eglCreatePbufferSurface(m_eglDisplay, m_eglConfig, attribs);

    // Ask ANGLE for the D3D share handle backing the pbuffer.
    HANDLE share_handle = 0;
    PFNEGLQUERYSURFACEPOINTERANGLEPROC eglQuerySurfacePointerANGLE =
            reinterpret_cast<PFNEGLQUERYSURFACEPOINTERANGLEPROC>(eglGetProcAddress("eglQuerySurfacePointerANGLE"));
    eglQuerySurfacePointerANGLE(
                m_eglDisplay,
                pbuffer,
                EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, &share_handle);


    // Create the D3D texture on top of the shared handle.
    m_device->CreateTexture(w, h, 1,
                            D3DUSAGE_RENDERTARGET,
                            D3DFMT_X8R8G8B8,
                            D3DPOOL_DEFAULT,
                            &m_texture,
                            &share_handle);

    m_eglSurface = pbuffer;

    if (m_glContext)
        m_glContext->doneCurrent();
}
+
// Copies the given D3D surface into the shared D3D/EGL texture and binds
// the EGL pbuffer contents to the GL texture. Returns true when the GL
// texture now holds the new frame.
bool D3DPresentEngine::updateTexture(IDirect3DSurface9 *src)
{
    if (!m_texture)
        return false;

    // The GL/EGL calls below need our private context current (when we
    // own one; otherwise the caller's context is assumed current).
    if (m_glContext)
        m_glContext->makeCurrent(m_offscreenSurface);

    glBindTexture(GL_TEXTURE_2D, m_glTexture);

    IDirect3DSurface9 *dest = NULL;

    // Copy the sample surface to the shared D3D/EGL surface
    HRESULT hr = m_texture->GetSurfaceLevel(0, &dest);
    if (FAILED(hr))
        goto done;

    // GPU-side copy into the shared surface.
    hr = m_device->StretchRect(src, NULL, dest, NULL, D3DTEXF_NONE);
    if (FAILED(hr))
        qWarning("Failed to copy D3D surface");

    // Publish the pbuffer's (now updated) color buffer through the bound
    // GL texture.
    if (hr == S_OK)
        eglBindTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER);

done:
    qt_wmf_safeRelease(&dest);

    if (m_glContext)
        m_glContext->doneCurrent();

    return SUCCEEDED(hr);
}
+
+HRESULT D3DPresentEngine::initializeD3D()
+{
+    HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &m_D3D9);
+
+    if (SUCCEEDED(hr))
+        hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, &m_deviceManager);
+
+    return hr;
+}
+
+HRESULT D3DPresentEngine::createD3DDevice()
+{
+    HRESULT hr = S_OK;
+    HWND hwnd = NULL;
+    UINT uAdapterID = D3DADAPTER_DEFAULT;
+    DWORD vp = 0;
+
+    D3DCAPS9 ddCaps;
+    ZeroMemory(&ddCaps, sizeof(ddCaps));
+
+    IDirect3DDevice9Ex* device = NULL;
+
+    // Hold the lock because we might be discarding an existing device.
+    QMutexLocker locker(&m_mutex);
+
+    if (!m_D3D9 || !m_deviceManager)
+        return MF_E_NOT_INITIALIZED;
+
+    hwnd = ::GetShellWindow();
+
+    // Note: The presenter creates additional swap chains to present the
+    // video frames. Therefore, it does not use the device's implicit
+    // swap chain, so the size of the back buffer here is 1 x 1.
+
+    D3DPRESENT_PARAMETERS pp;
+    ZeroMemory(&pp, sizeof(pp));
+
+    pp.BackBufferWidth = 1;
+    pp.BackBufferHeight = 1;
+    pp.BackBufferFormat = D3DFMT_UNKNOWN;
+    pp.BackBufferCount = 1;
+    pp.Windowed = TRUE;
+    pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    pp.BackBufferFormat = D3DFMT_UNKNOWN;
+    pp.hDeviceWindow = hwnd;
+    pp.Flags = D3DPRESENTFLAG_VIDEO;
+    pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
+
+    hr = m_D3D9->GetDeviceCaps(uAdapterID, D3DDEVTYPE_HAL, &ddCaps);
+    if (FAILED(hr))
+        goto done;
+
+    if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+        vp = D3DCREATE_HARDWARE_VERTEXPROCESSING;
+    else
+        vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
+
+    hr = m_D3D9->CreateDeviceEx(
+                uAdapterID,
+                D3DDEVTYPE_HAL,
+                pp.hDeviceWindow,
+                vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE,
+                &pp,
+                NULL,
+                &device
+                );
+    if (FAILED(hr))
+        goto done;
+
+    hr = m_D3D9->GetAdapterDisplayMode(uAdapterID, &m_displayMode);
+    if (FAILED(hr))
+        goto done;
+
+    hr = m_deviceManager->ResetDevice(device, m_deviceResetToken);
+    if (FAILED(hr))
+        goto done;
+
+    qt_wmf_safeRelease(&m_device);
+
+    m_device = device;
+    m_device->AddRef();
+
+done:
+    qt_wmf_safeRelease(&device);
+    return hr;
+}
+
+HRESULT D3DPresentEngine::createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample)
+{
+    D3DCOLOR clrBlack = D3DCOLOR_ARGB(0xFF, 0x00, 0x00, 0x00);
+
+    IDirect3DSurface9* surface = NULL;
+    IMFSample* sample = NULL;
+
+    // Get the back buffer surface.
+    HRESULT hr = swapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &surface);
+    if (FAILED(hr))
+        goto done;
+
+    // Fill it with black.
+    hr = m_device->ColorFill(surface, NULL, clrBlack);
+    if (FAILED(hr))
+        goto done;
+
+    hr = MFCreateVideoSampleFromSurface(surface, &sample);
+    if (FAILED(hr))
+        goto done;
+
+    *videoSample = sample;
+    (*videoSample)->AddRef();
+
+done:
+    qt_wmf_safeRelease(&surface);
+    qt_wmf_safeRelease(&sample);
+    return hr;
+}
+
+HRESULT D3DPresentEngine::getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS* pp)
+{
+    ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
+
+    // Get some information about the video format.
+
+    UINT32 width = 0, height = 0;
+
+    HRESULT hr = MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height);
+    if (FAILED(hr))
+        return hr;
+
+    DWORD d3dFormat = 0;
+
+    hr = qt_wmf_getFourCC(type, &d3dFormat);
+    if (FAILED(hr))
+        return hr;
+
+    ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS));
+    pp->BackBufferWidth = width;
+    pp->BackBufferHeight = height;
+    pp->Windowed = TRUE;
+    pp->SwapEffect = D3DSWAPEFFECT_DISCARD;
+    pp->BackBufferFormat = (D3DFORMAT)d3dFormat;
+    pp->hDeviceWindow = ::GetShellWindow();
+    pp->Flags = D3DPRESENTFLAG_VIDEO;
+    pp->PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
+
+    D3DDEVICE_CREATION_PARAMETERS params;
+    hr = m_device->GetCreationParameters(&params);
+    if (FAILED(hr))
+        return hr;
+
+    if (params.DeviceType != D3DDEVTYPE_HAL)
+        pp->Flags |= D3DPRESENTFLAG_LOCKABLE_BACKBUFFER;
+
+    return S_OK;
+}
diff --git a/src/plugins/wmf/evrd3dpresentengine.h b/src/plugins/wmf/evrd3dpresentengine.h
new file mode 100644 (file)
index 0000000..87b3e96
--- /dev/null
@@ -0,0 +1,130 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia.  For licensing terms and
+** conditions see http://qt.digia.com/licensing.  For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights.  These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef EVRD3DPRESENTENGINE_H
+#define EVRD3DPRESENTENGINE_H
+
+#include <QObject>
+#include <EGL/egl.h>
+#include <QMutex>
+#include <d3d9types.h>
+#include <QVideoSurfaceFormat>
+
+struct IDirect3D9Ex;
+struct IDirect3DDevice9;
+struct IDirect3DDevice9Ex;
+struct IDirect3DDeviceManager9;
+struct IDirect3DSurface9;
+struct IDirect3DTexture9;
+struct IMFSample;
+struct IMFMediaType;
+struct IDirect3DSwapChain9;
+
// Randomly generated GUIDs, used as keys for private IMFSample attributes.
static const GUID MFSamplePresenter_SampleCounter =
{ 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } };

// Attribute holding a sample's swap chain so the swap chain stays alive
// as long as the sample (set in D3DPresentEngine::createVideoSamples()).
static const GUID MFSamplePresenter_SampleSwapChain =
{ 0xad885bd1, 0x7def, 0x414a, { 0xb5, 0xb0, 0xd3, 0xd2, 0x63, 0xd6, 0xe9, 0x6d } };
+
+QT_BEGIN_NAMESPACE
+
+class QAbstractVideoSurface;
+class QOpenGLContext;
+
// Owns the Direct3D device used for DXVA-accelerated decoding and the
// shared D3D/EGL surface (an ANGLE feature) through which decoded frames
// are exposed to Qt as an OpenGL texture (see createOffscreenTexture()
// and updateTexture() in the .cpp).
class D3DPresentEngine : public QObject
{
    Q_OBJECT
public:
    D3DPresentEngine();
    virtual ~D3DPresentEngine();

    // Start/stop the QAbstractVideoSurface that receives the frames.
    void start();
    void stop();

    // Exposes the DXVA device manager to the EVR mixer.
    HRESULT getService(REFGUID guidService, REFIID riid, void** ppv);
    HRESULT checkFormat(D3DFORMAT format);

    HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue);
    void releaseResources();

    UINT refreshRate() const { return m_displayMode.RefreshRate; }

    void setSurface(QAbstractVideoSurface *surface);
    void setSurfaceFormat(const QVideoSurfaceFormat &format);

    void createOffscreenTexture();
    bool updateTexture(IDirect3DSurface9 *src);

public Q_SLOTS:
    // Presents one IMFSample (passed as an opaque pointer; ownership is
    // transferred).
    void presentSample(void* sample, qint64 llTarget);

private:
    HRESULT initializeD3D();
    HRESULT getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS *pp);
    HRESULT createD3DDevice();
    HRESULT createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample);

    QMutex m_mutex; // Recursive; guards device, surface and format state.

    UINT m_deviceResetToken; // Token shared with the DXVA device manager.
    D3DDISPLAYMODE m_displayMode;

    IDirect3D9Ex *m_D3D9;
    IDirect3DDevice9Ex *m_device;
    IDirect3DDeviceManager9 *m_deviceManager;

    QVideoSurfaceFormat m_surfaceFormat;
    QAbstractVideoSurface *m_surface; // Not owned.

    QOpenGLContext *m_glContext;      // Private context; NULL if the caller's is used.
    QWindow *m_offscreenSurface;      // 1x1 window backing m_glContext.

    // NOTE(review): EGLDisplay/EGLConfig are themselves opaque handle
    // types (void* in ANGLE); storing pointer-to-handle here only works
    // by accident of that typedef — confirm and consider using the plain
    // handle types instead.
    EGLDisplay *m_eglDisplay;
    EGLConfig *m_eglConfig;
    EGLSurface m_eglSurface;          // Pbuffer backing the shared texture.
    unsigned int m_glTexture;         // GL texture bound to m_eglSurface.
    IDirect3DTexture9 *m_texture;     // D3D side of the shared surface.
};
+
+QT_END_NAMESPACE
+
+#endif // EVRD3DPRESENTENGINE_H
diff --git a/src/plugins/wmf/mfactivate.cpp b/src/plugins/wmf/mfactivate.cpp
new file mode 100644 (file)
index 0000000..cf64adc
--- /dev/null
@@ -0,0 +1,89 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia.  For licensing terms and
+** conditions see http://qt.digia.com/licensing.  For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights.  These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "mfactivate.h"
+
+#include <mfapi.h>
+
+MFAbstractActivate::MFAbstractActivate()
+    : m_attributes(0)
+    , m_cRef(1)
+{
+    MFCreateAttributes(&m_attributes, 0);
+}
+
+MFAbstractActivate::~MFAbstractActivate()
+{
+    if (m_attributes)
+        m_attributes->Release();
+}
+
+
+HRESULT MFAbstractActivate::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+    if (!ppvObject)
+        return E_POINTER;
+    if (riid == IID_IMFActivate) {
+        *ppvObject = static_cast<IMFActivate*>(this);
+    } else if (riid == IID_IMFAttributes) {
+        *ppvObject = static_cast<IMFAttributes*>(this);
+    } else if (riid == IID_IUnknown) {
+        *ppvObject = static_cast<IUnknown*>(static_cast<IMFActivate*>(this));
+    } else {
+        *ppvObject = NULL;
+        return E_NOINTERFACE;
+    }
+    AddRef();
+    return S_OK;
+}
+
+ULONG MFAbstractActivate::AddRef(void)
+{
+    return InterlockedIncrement(&m_cRef);
+}
+
+ULONG MFAbstractActivate::Release(void)
+{
+    ULONG cRef = InterlockedDecrement(&m_cRef);
+    if (cRef == 0)
+        delete this;
+    return cRef;
+}
diff --git a/src/plugins/wmf/mfactivate.h b/src/plugins/wmf/mfactivate.h
new file mode 100644 (file)
index 0000000..e021390
--- /dev/null
@@ -0,0 +1,216 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia.  For licensing terms and
+** conditions see http://qt.digia.com/licensing.  For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights.  These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef MFACTIVATE_H
+#define MFACTIVATE_H
+
+#include "mfglobal.h"
+
+#include <mfidl.h>
+
+class MFAbstractActivate : public IMFActivate
+{
+public:
+    explicit MFAbstractActivate();
+    virtual ~MFAbstractActivate();
+
+    //from IUnknown
+    STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject);
+    STDMETHODIMP_(ULONG) AddRef(void);
+    STDMETHODIMP_(ULONG) Release(void);
+
+    //from IMFAttributes
+    STDMETHODIMP GetItem(REFGUID guidKey, PROPVARIANT *pValue)
+    {
+        return m_attributes->GetItem(guidKey, pValue);
+    }
+
+    STDMETHODIMP GetItemType(REFGUID guidKey, MF_ATTRIBUTE_TYPE *pType)
+    {
+        return m_attributes->GetItemType(guidKey, pType);
+    }
+
+    STDMETHODIMP CompareItem(REFGUID guidKey, REFPROPVARIANT Value, BOOL *pbResult)
+    {
+        return m_attributes->CompareItem(guidKey, Value, pbResult);
+    }
+
+    STDMETHODIMP Compare(IMFAttributes *pTheirs, MF_ATTRIBUTES_MATCH_TYPE MatchType, BOOL *pbResult)
+    {
+        return m_attributes->Compare(pTheirs, MatchType, pbResult);
+    }
+
+    STDMETHODIMP GetUINT32(REFGUID guidKey, UINT32 *punValue)
+    {
+        return m_attributes->GetUINT32(guidKey, punValue);
+    }
+
+    STDMETHODIMP GetUINT64(REFGUID guidKey, UINT64 *punValue)
+    {
+        return m_attributes->GetUINT64(guidKey, punValue);
+    }
+
+    STDMETHODIMP GetDouble(REFGUID guidKey, double *pfValue)
+    {
+        return m_attributes->GetDouble(guidKey, pfValue);
+    }
+
+    STDMETHODIMP GetGUID(REFGUID guidKey, GUID *pguidValue)
+    {
+        return m_attributes->GetGUID(guidKey, pguidValue);
+    }
+
+    STDMETHODIMP GetStringLength(REFGUID guidKey, UINT32 *pcchLength)
+    {
+        return m_attributes->GetStringLength(guidKey, pcchLength);
+    }
+
+    STDMETHODIMP GetString(REFGUID guidKey, LPWSTR pwszValue, UINT32 cchBufSize, UINT32 *pcchLength)
+    {
+        return m_attributes->GetString(guidKey, pwszValue, cchBufSize, pcchLength);
+    }
+
+    STDMETHODIMP GetAllocatedString(REFGUID guidKey, LPWSTR *ppwszValue, UINT32 *pcchLength)
+    {
+        return m_attributes->GetAllocatedString(guidKey, ppwszValue, pcchLength);
+    }
+
+    STDMETHODIMP GetBlobSize(REFGUID guidKey, UINT32 *pcbBlobSize)
+    {
+        return m_attributes->GetBlobSize(guidKey, pcbBlobSize);
+    }
+
+    STDMETHODIMP GetBlob(REFGUID guidKey, UINT8 *pBuf, UINT32 cbBufSize, UINT32 *pcbBlobSize)
+    {
+        return m_attributes->GetBlob(guidKey, pBuf, cbBufSize, pcbBlobSize);
+    }
+
+    STDMETHODIMP GetAllocatedBlob(REFGUID guidKey, UINT8 **ppBuf, UINT32 *pcbSize)
+    {
+        return m_attributes->GetAllocatedBlob(guidKey, ppBuf, pcbSize);
+    }
+
+    STDMETHODIMP GetUnknown(REFGUID guidKey, REFIID riid, LPVOID *ppv)
+    {
+        return m_attributes->GetUnknown(guidKey, riid, ppv);
+    }
+
+    STDMETHODIMP SetItem(REFGUID guidKey, REFPROPVARIANT Value)
+    {
+        return m_attributes->SetItem(guidKey, Value);
+    }
+
+    STDMETHODIMP DeleteItem(REFGUID guidKey)
+    {
+        return m_attributes->DeleteItem(guidKey);
+    }
+
+    STDMETHODIMP DeleteAllItems()
+    {
+        return m_attributes->DeleteAllItems();
+    }
+
+    STDMETHODIMP SetUINT32(REFGUID guidKey, UINT32 unValue)
+    {
+        return m_attributes->SetUINT32(guidKey, unValue);
+    }
+
+    STDMETHODIMP SetUINT64(REFGUID guidKey, UINT64 unValue)
+    {
+        return m_attributes->SetUINT64(guidKey, unValue);
+    }
+
+    STDMETHODIMP SetDouble(REFGUID guidKey, double fValue)
+    {
+        return m_attributes->SetDouble(guidKey, fValue);
+    }
+
+    STDMETHODIMP SetGUID(REFGUID guidKey, REFGUID guidValue)
+    {
+        return m_attributes->SetGUID(guidKey, guidValue);
+    }
+
+    STDMETHODIMP SetString(REFGUID guidKey, LPCWSTR wszValue)
+    {
+        return m_attributes->SetString(guidKey, wszValue);
+    }
+
+    STDMETHODIMP SetBlob(REFGUID guidKey, const UINT8 *pBuf, UINT32 cbBufSize)
+    {
+        return m_attributes->SetBlob(guidKey, pBuf, cbBufSize);
+    }
+
+    STDMETHODIMP SetUnknown(REFGUID guidKey, IUnknown *pUnknown)
+    {
+        return m_attributes->SetUnknown(guidKey, pUnknown);
+    }
+
+    STDMETHODIMP LockStore()
+    {
+        return m_attributes->LockStore();
+    }
+
+    STDMETHODIMP UnlockStore()
+    {
+        return m_attributes->UnlockStore();
+    }
+
+    STDMETHODIMP GetCount(UINT32 *pcItems)
+    {
+        return m_attributes->GetCount(pcItems);
+    }
+
+    STDMETHODIMP GetItemByIndex(UINT32 unIndex, GUID *pguidKey, PROPVARIANT *pValue)
+    {
+        return m_attributes->GetItemByIndex(unIndex, pguidKey, pValue);
+    }
+
+    STDMETHODIMP CopyAllItems(IMFAttributes *pDest)
+    {
+        return m_attributes->CopyAllItems(pDest);
+    }
+
+private:
+    IMFAttributes *m_attributes;
+    ULONG m_cRef;
+};
+
+#endif // MFACTIVATE_H
diff --git a/src/plugins/wmf/mfglobal.cpp b/src/plugins/wmf/mfglobal.cpp
new file mode 100644 (file)
index 0000000..b9b6096
--- /dev/null
@@ -0,0 +1,124 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia.  For licensing terms and
+** conditions see http://qt.digia.com/licensing.  For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights.  These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "mfglobal.h"
+
+HRESULT qt_wmf_getFourCC(IMFMediaType *type, DWORD *fourCC)
+{
+    if (!fourCC)
+        return E_POINTER;
+
+    HRESULT hr = S_OK;
+    GUID guidSubType = GUID_NULL;
+
+    if (SUCCEEDED(hr))
+        hr = type->GetGUID(MF_MT_SUBTYPE, &guidSubType);
+
+    if (SUCCEEDED(hr))
+        *fourCC = guidSubType.Data1;
+
+    return hr;
+}
+
+MFRatio qt_wmf_getPixelAspectRatio(IMFMediaType *type)
+{
+    MFRatio ratio = { 0, 0 };
+    HRESULT hr = S_OK;
+
+    hr = MFGetAttributeRatio(type, MF_MT_PIXEL_ASPECT_RATIO, (UINT32*)&ratio.Numerator, (UINT32*)&ratio.Denominator);
+    if (FAILED(hr)) {
+        ratio.Numerator = 1;
+        ratio.Denominator = 1;
+    }
+    return ratio;
+}
+
+bool qt_wmf_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2)
+{
+    if (!type1 && !type2)
+        return true;
+    else if (!type1 || !type2)
+        return false;
+
+    DWORD dwFlags = 0;
+    HRESULT hr = type1->IsEqual(type2, &dwFlags);
+
+    return (hr == S_OK);
+}
+
+HRESULT qt_wmf_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height)
+{
+    float fOffsetX = qt_wmf_MFOffsetToFloat(area.OffsetX);
+    float fOffsetY = qt_wmf_MFOffsetToFloat(area.OffsetY);
+
+    if ( ((LONG)fOffsetX + area.Area.cx > (LONG)width) ||
+         ((LONG)fOffsetY + area.Area.cy > (LONG)height) )
+        return MF_E_INVALIDMEDIATYPE;
+    else
+        return S_OK;
+}
+
+bool qt_wmf_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
+{
+    if (!sample || !clock)
+        return false;
+
+    HRESULT hr = S_OK;
+    MFTIME hnsTimeNow = 0;
+    MFTIME hnsSystemTime = 0;
+    MFTIME hnsSampleStart = 0;
+    MFTIME hnsSampleDuration = 0;
+
+    hr = clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
+
+    if (SUCCEEDED(hr))
+        hr = sample->GetSampleTime(&hnsSampleStart);
+
+    if (SUCCEEDED(hr))
+        hr = sample->GetSampleDuration(&hnsSampleDuration);
+
+    if (SUCCEEDED(hr)) {
+        if (hnsSampleStart + hnsSampleDuration < hnsTimeNow)
+            return true;
+    }
+
+    return false;
+}
diff --git a/src/plugins/wmf/mfglobal.h b/src/plugins/wmf/mfglobal.h
new file mode 100644 (file)
index 0000000..9ab24e3
--- /dev/null
@@ -0,0 +1,157 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia.  For licensing terms and
+** conditions see http://qt.digia.com/licensing.  For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights.  These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file.  Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef MFGLOBAL_H
+#define MFGLOBAL_H
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+
+
+template<class T>
+class AsyncCallback : public IMFAsyncCallback
+{
+public:
+    typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult);
+
+    AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn)
+    {
+    }
+
+    // IUnknown
+    STDMETHODIMP QueryInterface(REFIID iid, void** ppv)
+    {
+        if (!ppv)
+            return E_POINTER;
+
+        if (iid == __uuidof(IUnknown)) {
+            *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this));
+        } else if (iid == __uuidof(IMFAsyncCallback)) {
+            *ppv = static_cast<IMFAsyncCallback*>(this);
+        } else {
+            *ppv = NULL;
+            return E_NOINTERFACE;
+        }
+        AddRef();
+        return S_OK;
+    }
+
+    STDMETHODIMP_(ULONG) AddRef() {
+        // Delegate to parent class.
+        return m_parent->AddRef();
+    }
+    STDMETHODIMP_(ULONG) Release() {
+        // Delegate to parent class.
+        return m_parent->Release();
+    }
+
+
+    // IMFAsyncCallback methods
+    STDMETHODIMP GetParameters(DWORD*, DWORD*)
+    {
+        // Implementation of this method is optional.
+        return E_NOTIMPL;
+    }
+
+    STDMETHODIMP Invoke(IMFAsyncResult* asyncResult)
+    {
+        return (m_parent->*m_invokeFn)(asyncResult);
+    }
+
+    T *m_parent;
+    InvokeFn m_invokeFn;
+};
+
+template <class T> void qt_wmf_safeRelease(T **ppT)
+{
+    if (*ppT) {
+        (*ppT)->Release();
+        *ppT = NULL;
+    }
+}
+
+template <class T>
+void qt_wmf_copyComPointer(T* &dest, T *src)
+{
+    if (dest)
+        dest->Release();
+    dest = src;
+    if (dest)
+        dest->AddRef();
+}
+
+HRESULT qt_wmf_getFourCC(IMFMediaType *type, DWORD *fourCC);
+MFRatio qt_wmf_getPixelAspectRatio(IMFMediaType *type);
+bool qt_wmf_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2);
+HRESULT qt_wmf_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height);
+bool qt_wmf_isSampleTimePassed(IMFClock *clock, IMFSample *sample);
+
+inline float qt_wmf_MFOffsetToFloat(const MFOffset& offset)
+{
+    return offset.value + (float(offset.fract) / 65536);
+}
+
+inline MFOffset qt_wmf_makeMFOffset(float v)
+{
+    MFOffset offset;
+    offset.value = short(v);
+    offset.fract = WORD(65536 * (v-offset.value));
+    return offset;
+}
+
+inline MFVideoArea qt_wmf_makeMFArea(float x, float y, DWORD width, DWORD height)
+{
+    MFVideoArea area;
+    area.OffsetX = qt_wmf_makeMFOffset(x);
+    area.OffsetY = qt_wmf_makeMFOffset(y);
+    area.Area.cx = width;
+    area.Area.cy = height;
+    return area;
+}
+
+inline HRESULT qt_wmf_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
+{
+    return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, (UINT32*)&pRatio->Numerator, (UINT32*)&pRatio->Denominator);
+}
+
+
+#endif // MFGLOBAL_H
index efb0e96..528a5dc 100644 (file)
 ****************************************************************************/
 
 #include "mfvideorenderercontrol.h"
-#include <mferror.h>
+#include "mfglobal.h"
+#ifdef QT_OPENGL_ES_2_ANGLE
+#include "evrcustompresenter.h"
+#endif
 #include <qabstractvideosurface.h>
 #include <qvideosurfaceformat.h>
 #include <qtcore/qtimer.h>
@@ -114,67 +117,6 @@ namespace
         MapMode m_mapMode;
     };
 
-    template<class T>
-    class AsyncCallback : public IMFAsyncCallback
-    {
-    public:
-        typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *pAsyncResult);
-
-        AsyncCallback(T *pParent, InvokeFn fn) : m_pParent(pParent), m_pInvokeFn(fn)
-        {
-        }
-
-        // IUnknown
-        STDMETHODIMP QueryInterface(REFIID iid, void** ppv)
-        {
-            if (!ppv)
-            {
-                return E_POINTER;
-            }
-            if (iid == __uuidof(IUnknown))
-            {
-                *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this));
-            }
-            else if (iid == __uuidof(IMFAsyncCallback))
-            {
-                *ppv = static_cast<IMFAsyncCallback*>(this);
-            }
-            else
-            {
-                *ppv = NULL;
-                return E_NOINTERFACE;
-            }
-            AddRef();
-            return S_OK;
-        }
-        STDMETHODIMP_(ULONG) AddRef()
-        {
-            // Delegate to parent class.
-            return m_pParent->AddRef();
-        }
-        STDMETHODIMP_(ULONG) Release()
-        {
-            // Delegate to parent class.
-            return m_pParent->Release();
-        }
-
-        // IMFAsyncCallback methods
-        STDMETHODIMP GetParameters(DWORD*, DWORD*)
-        {
-            // Implementation of this method is optional.
-            return E_NOTIMPL;
-        }
-
-        STDMETHODIMP Invoke(IMFAsyncResult* pAsyncResult)
-        {
-            return (m_pParent->*m_pInvokeFn)(pAsyncResult);
-        }
-
-        T *m_pParent;
-        InvokeFn m_pInvokeFn;
-    };
-
-
     // Custom interface for handling IMFStreamSink::PlaceMarker calls asynchronously.
     MIDL_INTERFACE("a3ff32de-1031-438a-8b47-82f8acda59b7")
     IMarker : public IUnknown
@@ -2134,6 +2076,9 @@ MFVideoRendererControl::MFVideoRendererControl(QObject *parent)
     , m_surface(0)
     , m_currentActivate(0)
     , m_callback(0)
+#ifdef QT_OPENGL_ES_2_ANGLE
+    , m_presenterActivate(0)
+#endif
 {
 }
 
@@ -2147,6 +2092,14 @@ void MFVideoRendererControl::clear()
     if (m_surface)
         m_surface->stop();
 
+#ifdef QT_OPENGL_ES_2_ANGLE
+    if (m_presenterActivate) {
+        m_presenterActivate->ShutdownObject();
+        m_presenterActivate->Release();
+        m_presenterActivate = NULL;
+    }
+#endif
+
     if (m_currentActivate) {
         m_currentActivate->ShutdownObject();
         m_currentActivate->Release();
@@ -2174,12 +2127,22 @@ void MFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
         connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(supportedFormatsChanged()));
     }
 
+#ifdef QT_OPENGL_ES_2_ANGLE
+    if (m_presenterActivate)
+        m_presenterActivate->setSurface(m_surface);
+    else
+#endif
     if (m_currentActivate)
         static_cast<VideoRendererActivate*>(m_currentActivate)->setSurface(m_surface);
 }
 
 void MFVideoRendererControl::customEvent(QEvent *event)
 {
+#ifdef QT_OPENGL_ES_2_ANGLE
+    if (m_presenterActivate)
+        return;
+#endif
+
     if (!m_currentActivate)
         return;
 
@@ -2203,23 +2166,48 @@ void MFVideoRendererControl::customEvent(QEvent *event)
 
 void MFVideoRendererControl::supportedFormatsChanged()
 {
+#ifdef QT_OPENGL_ES_2_ANGLE
+    if (m_presenterActivate)
+        m_presenterActivate->supportedFormatsChanged();
+    else
+#endif
     if (m_currentActivate)
         static_cast<VideoRendererActivate*>(m_currentActivate)->supportedFormatsChanged();
 }
 
 void MFVideoRendererControl::present()
 {
+#ifdef QT_OPENGL_ES_2_ANGLE
+    if (m_presenterActivate)
+        return;
+#endif
+
     if (m_currentActivate)
         static_cast<VideoRendererActivate*>(m_currentActivate)->present();
 }
 
 IMFActivate* MFVideoRendererControl::createActivate()
 {
+    Q_ASSERT(m_surface);
+
     clear();
 
+#ifdef QT_OPENGL_ES_2_ANGLE
+    // We can use the EVR with our custom presenter only if the surface supports OpenGL
+    // texture handles
+    if (!m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).isEmpty()) {
+        // Create the EVR media sink, but replace the presenter with our own
+        if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) {
+            m_presenterActivate = new EVRCustomPresenterActivate;
+            m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate);
+        }
+    }
+
+    if (!m_currentActivate)
+#endif
     m_currentActivate = new VideoRendererActivate(this);
-    if (m_surface)
-        setSurface(m_surface);
+
+    setSurface(m_surface);
 
     return m_currentActivate;
 }
index b0db4dc..d796ce3 100644 (file)
 #include <mfapi.h>
 #include <mfidl.h>
 
+QT_BEGIN_NAMESPACE
+
+#ifdef QT_OPENGL_ES_2_ANGLE
+class EVRCustomPresenterActivate;
+#endif
+
+QT_END_NAMESPACE
+
 QT_USE_NAMESPACE
 
 class MFVideoRendererControl : public QVideoRendererControl
@@ -74,6 +82,10 @@ private:
     QAbstractVideoSurface *m_surface;
     IMFActivate *m_currentActivate;
     IMFSampleGrabberSinkCallback *m_callback;
+
+#ifdef QT_OPENGL_ES_2_ANGLE
+    EVRCustomPresenterActivate *m_presenterActivate;
+#endif
 };
 
 #endif
index 34bd2c3..a26f319 100644 (file)
@@ -16,14 +16,32 @@ HEADERS += \
     mfstream.h \
     sourceresolver.h \
     samplegrabber.h \
-    mftvideo.h
+    mftvideo.h \
+    mfglobal.h \
+    mfactivate.h
 
 SOURCES += \
     wmfserviceplugin.cpp \
     mfstream.cpp \
     sourceresolver.cpp \
     samplegrabber.cpp \
-    mftvideo.cpp
+    mftvideo.cpp \
+    mfactivate.cpp \
+    mfglobal.cpp
+
+contains(QT_CONFIG, angle) {
+    LIBS += -ld3d9 -ldxva2 -lwinmm -levr
+    QT += gui-private
+
+    HEADERS += \
+        evrcustompresenter.h \
+        evrd3dpresentengine.h
+
+    SOURCES += \
+        evrcustompresenter.cpp \
+        evrd3dpresentengine.cpp
+}
+
 
 include (player/player.pri)
 include (decoder/decoder.pri)