diff --git a/AVQt/CMakeLists.txt b/AVQt/CMakeLists.txt
index 1c630dfcb7262ff4475df253754a7ace5cb7a51e..e5e4468d03d6933c551c6d154247123179f8a371 100644
--- a/AVQt/CMakeLists.txt
+++ b/AVQt/CMakeLists.txt
@@ -201,6 +201,6 @@ if (WIN32)
     target_link_libraries(AVQt opengl32 OpenAL32)
     target_link_libraries(AVQtStatic opengl32 OpenAL32)
 else ()
-    target_link_libraries(AVQt GL openal EGL GLU)
-    target_link_libraries(AVQtStatic GL openal EGL GLU)
+    target_link_libraries(AVQt GL openal EGL GLU va)
+    target_link_libraries(AVQtStatic GL openal EGL GLU va)
 endif ()
\ No newline at end of file
diff --git a/AVQt/filter/DecoderQSV.cpp b/AVQt/filter/DecoderQSV.cpp
index fd867e3f342692a938ff7fed4cf9dc133a26b509..1107b8dc68164fc134d02cf3f4e076ca55797792 100644
--- a/AVQt/filter/DecoderQSV.cpp
+++ b/AVQt/filter/DecoderQSV.cpp
@@ -319,7 +319,6 @@ namespace AVQt {
                         cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0),
                                     av_buffer_ref(d->m_pDeviceCtx));
                         qDebug() << "Video CB time:" << time.msecsTo(QTime::currentTime());
-                        av_frame_unref(cbFrame);
                         av_frame_free(&cbFrame);
 //                        }));
                     }
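
Note on the DecoderQSV.cpp hunk above: av_frame_free() already unreferences all of the frame's buffers before releasing the AVFrame itself, so the explicit av_frame_unref() directly before it was redundant. A minimal sketch of the intended ownership pattern (illustrative only, not part of the patch; the helper name is hypothetical):

    extern "C" {
    #include <libavutil/frame.h>
    }

    static void deliverAndRelease(AVFrame *cbFrame) {
        // ... hand cbFrame to the callback, which takes its own reference if it needs one ...
        av_frame_free(&cbFrame); // unrefs every buffer and frees the frame; no separate av_frame_unref() needed
    }
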
diff --git a/AVQt/filter/EncoderQSV.cpp b/AVQt/filter/EncoderQSV.cpp
index a2c760d92c1e1709978f2e580cd28bcc6815804d..888521b59d24d394bb21b906bfea00bb0bd1fa6f 100644
--- a/AVQt/filter/EncoderQSV.cpp
+++ b/AVQt/filter/EncoderQSV.cpp
@@ -97,6 +97,8 @@ namespace AVQt {
             av_buffer_unref(&d->m_pFramesCtx);
         }
 
+        d->m_framerate = {0, 1};
+
         d->m_pLockedSource = nullptr;
 
         return 0;
@@ -162,8 +164,6 @@ namespace AVQt {
     }
 
     int EncoderQSV::init(IFrameSource *source, AVRational framerate, int64_t duration) {
-        Q_UNUSED(duration)
-        Q_UNUSED(framerate)
         Q_D(AVQt::EncoderQSV);
 
         if (d->m_pLockedSource) {
@@ -171,6 +171,8 @@ namespace AVQt {
         }
 
         d->m_pLockedSource = source;
+        d->m_framerate = framerate;
+        d->m_duration = duration;
         init();
         return 0;
     }
@@ -225,6 +227,12 @@ namespace AVQt {
             QMutexLocker lock{&d->m_cbListMutex};
             if (!d->m_cbList.contains(packetSink)) {
                 d->m_cbList.append(packetSink);
+                if (d->m_running.load() && d->m_pCodecCtx) {
+                    AVCodecParameters *parameters = avcodec_parameters_alloc();
+                    avcodec_parameters_from_context(parameters, d->m_pCodecCtx);
+                    packetSink->init(this, d->m_framerate, d->m_pCodecCtx->time_base, d->m_duration, parameters, nullptr, nullptr);
+                    packetSink->start(this);
+                }
                 return d->m_cbList.indexOf(packetSink);
             } else {
                 return -1;
@@ -238,6 +246,7 @@ namespace AVQt {
         {
             QMutexLocker lock{&d->m_cbListMutex};
             auto count = d->m_cbList.removeAll(packetSink);
+            packetSink->deinit(this);
             return count > 0 ? count : -1;
         }
     }
@@ -256,30 +265,27 @@ namespace AVQt {
         QPair<AVFrame *, int64_t> queueFrame{av_frame_alloc(), frameDuration};
         switch (frame->format) {
             case AV_PIX_FMT_QSV:
+                if (!d->m_pDeviceCtx) {
+                    d->m_pDeviceCtx = av_buffer_ref(pDeviceCtx);
+                    d->m_pFramesCtx = av_buffer_ref(frame->hw_frames_ctx);
+                }
+                qDebug("Referencing frame");
+                av_frame_ref(queueFrame.first, frame);
+                break;
             case AV_PIX_FMT_DRM_PRIME:
             case AV_PIX_FMT_VAAPI:
                 if (!d->m_pDeviceCtx) {
-//                    d->m_pDeviceCtx = av_buffer_ref(pDeviceCtx);
-//                    d->m_pFramesCtx = av_buffer_ref(frame->hw_frames_ctx);
                     ret = av_hwdevice_ctx_create_derived(&d->m_pDeviceCtx, AV_HWDEVICE_TYPE_QSV, pDeviceCtx, 0);
                     if (ret != 0) {
                         qFatal("[AVQt::EncoderQSV] %d: Could not create derived AVHWDeviceContext: %s", ret,
                                av_make_error_string(strBuf, strBufSize, ret));
                     }
-                    AVHWFramesContext *framesCtx, *framesCtxOld;
-                    framesCtxOld = reinterpret_cast<AVHWFramesContext *>(frame->hw_frames_ctx->data);
-                    ret = av_hwframe_ctx_create_derived(&d->m_pFramesCtx, framesCtxOld->sw_format, d->m_pDeviceCtx, frame->hw_frames_ctx, AV_HWFRAME_MAP_READ);
+                    ret = av_hwframe_ctx_create_derived(&d->m_pFramesCtx, AV_PIX_FMT_QSV, d->m_pDeviceCtx, frame->hw_frames_ctx,
+                                                        AV_HWFRAME_MAP_READ);
                     if (ret != 0) {
                         qFatal("[AVQt::EncoderQSV] %d: Could not create derived AVHWFramesContext: %s", ret,
                                av_make_error_string(strBuf, strBufSize, ret));
                     }
-                    framesCtx = reinterpret_cast<AVHWFramesContext *>(d->m_pFramesCtx->data);
-
-                    framesCtx->sw_format = framesCtxOld->sw_format;
-                    framesCtx->format = AV_PIX_FMT_QSV;
-                    framesCtx->width = framesCtxOld->width;
-                    framesCtx->height = framesCtxOld->height;
-                    framesCtx->initial_pool_size = framesCtxOld->initial_pool_size;
 
                     ret = av_hwframe_ctx_init(d->m_pFramesCtx);
                     if (ret != 0) {
@@ -287,13 +293,7 @@ namespace AVQt {
                                av_make_error_string(strBuf, strBufSize, ret));
                     }
                 }
-                queueFrame.first->hw_frames_ctx = av_buffer_ref(d->m_pFramesCtx);
-                queueFrame.first->format = AV_PIX_FMT_QSV;
-                ret = av_hwframe_map(queueFrame.first, frame, AV_HWFRAME_MAP_READ);
-                if (ret != 0) {
-                    qFatal("[AVQt::EncoderQSV] %d: Could not map frame from source device to QSV: %s", ret,
-                           av_make_error_string(strBuf, strBufSize, ret));
-                }
+                av_frame_ref(queueFrame.first, frame);
                 break;
             case AV_PIX_FMT_DXVA2_VLD:
                 qDebug("Transferring frame from GPU to CPU");
@@ -306,7 +306,7 @@ namespace AVQt {
                 break;
         }
 
-        while (d->m_inputQueue.size() > 4) {
+        while (d->m_inputQueue.size() > 6) {
             QThread::msleep(1);
         }
         {
@@ -386,7 +386,7 @@ namespace AVQt {
                     d->m_pCodecCtx->color_primaries = AVCOL_PRI_BT2020;
                     d->m_pCodecCtx->color_trc = AVCOL_TRC_SMPTE2084;
                     d->m_pCodecCtx->colorspace = AVCOL_SPC_BT2020_NCL;
-                    d->m_pCodecCtx->framerate = av_make_q(60, 1);
+                    d->m_pCodecCtx->framerate = d->m_framerate;
 
                     // Timestamps from frame sources are always microseconds, trying to use this timebase for the encoder too
                     d->m_pCodecCtx->time_base = av_make_q(1, 1000000);
@@ -400,7 +400,7 @@ namespace AVQt {
                     for (const auto &cb: d->m_cbList) {
                         AVCodecParameters *parameters = avcodec_parameters_alloc();
                         avcodec_parameters_from_context(parameters, d->m_pCodecCtx);
-                        cb->init(this, av_make_q(0, 1), d->m_pCodecCtx->time_base, 0, parameters, nullptr, nullptr);
+                        cb->init(this, d->m_framerate, d->m_pCodecCtx->time_base, d->m_duration, parameters, nullptr, nullptr);
                         cb->start(this);
                     }
                 }
@@ -411,11 +411,27 @@ namespace AVQt {
                         frame = d->m_inputQueue.dequeue();
                     }
                     if (frame.first->hw_frames_ctx) {
-                        frame.first->pts = av_rescale_q(frame.first->pts, av_make_q(1, 1000000),
-                                                        d->m_pCodecCtx->time_base); // Incoming timestamps are always microseconds
-                        ret = avcodec_send_frame(d->m_pCodecCtx, frame.first);
-                        qDebug("[AVQt::EncoderQSV] Sent frame with PTS %lld to encoder", static_cast<long long>(frame.first->pts));
+                        AVFrame *outFrame = nullptr;
+                        switch (frame.first->format) {
+                            case AV_PIX_FMT_VAAPI:
+                            case AV_PIX_FMT_DRM_PRIME:
+                                qDebug("[AVQt::EncoderQSV] Mapping VAAPI/DRM_PRIME frame to QSV");
+                                outFrame = av_frame_alloc();
+                                outFrame->hw_frames_ctx = av_buffer_ref(d->m_pFramesCtx);
+                                outFrame->format = AV_PIX_FMT_QSV;
+                                av_hwframe_map(outFrame, frame.first, AV_HWFRAME_MAP_DIRECT | AV_HWFRAME_MAP_READ);
+                                break;
+                            case AV_PIX_FMT_QSV:
+                            default: // QSV frames (and any unexpected format) are passed through by reference
+                                outFrame = av_frame_clone(frame.first);
+                                break;
+                        }
+                        outFrame->pts = av_rescale_q(frame.first->pts, av_make_q(1, 1000000),
+                                                     d->m_pCodecCtx->time_base); // Incoming timestamps are always microseconds
+                        ret = avcodec_send_frame(d->m_pCodecCtx, outFrame);
+                        qDebug("[AVQt::EncoderQSV] Sent frame with PTS %lld to encoder", static_cast<long long>(outFrame->pts));
                         av_frame_free(&frame.first);
+                        av_frame_free(&outFrame);
                     } else {
                         av_hwframe_transfer_data(d->m_pHWFrame, frame.first, 0);
                         d->m_pHWFrame->pts = av_rescale_q(frame.first->pts, av_make_q(1, 1000000),
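
The EncoderQSV.cpp changes above switch from manually configuring a QSV frames context to deriving it from the source VAAPI/DRM_PRIME context and mapping frames into it on the encoder thread. A minimal sketch of that derive-and-map pattern, assuming the caller owns the source device and frames contexts (the helper name and its error handling are illustrative, not part of the patch):

    extern "C" {
    #include <libavutil/buffer.h>
    #include <libavutil/frame.h>
    #include <libavutil/hwcontext.h>
    }

    // Sketch: derive a QSV device/frames context from the decoder's context once,
    // then map each source frame into it so the encoder sees AV_PIX_FMT_QSV surfaces.
    static AVFrame *mapSourceFrameToQsv(AVBufferRef *srcDeviceCtx, AVFrame *srcFrame,
                                        AVBufferRef **qsvDeviceCtx, AVBufferRef **qsvFramesCtx) {
        if (!*qsvDeviceCtx) {
            // Share the underlying GPU device instead of opening a second one.
            if (av_hwdevice_ctx_create_derived(qsvDeviceCtx, AV_HWDEVICE_TYPE_QSV, srcDeviceCtx, 0) < 0) {
                return nullptr;
            }
            // Wrap the decoder's frame pool; no new surfaces are allocated.
            if (av_hwframe_ctx_create_derived(qsvFramesCtx, AV_PIX_FMT_QSV, *qsvDeviceCtx,
                                              srcFrame->hw_frames_ctx, AV_HWFRAME_MAP_READ) < 0) {
                return nullptr;
            }
        }
        AVFrame *qsvFrame = av_frame_alloc();
        qsvFrame->hw_frames_ctx = av_buffer_ref(*qsvFramesCtx);
        qsvFrame->format = AV_PIX_FMT_QSV;
        // Mapping aliases the source surface; free the mapped frame before the source pool is torn down.
        if (av_hwframe_map(qsvFrame, srcFrame, AV_HWFRAME_MAP_DIRECT | AV_HWFRAME_MAP_READ) < 0) {
            av_frame_free(&qsvFrame);
            return nullptr;
        }
        qsvFrame->pts = srcFrame->pts;
        return qsvFrame;
    }

Mapping, rather than transferring with av_hwframe_transfer_data(), keeps the pixel data on the GPU, which is the point of the QSV encode path.
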
diff --git a/AVQt/filter/private/EncoderQSV_p.h b/AVQt/filter/private/EncoderQSV_p.h
index 8c3a545e42e25be8d647c77b0e0ad93f1fb51a4e..7e967148549fdcd14a7edec22a3fc8d18e765c90 100644
--- a/AVQt/filter/private/EncoderQSV_p.h
+++ b/AVQt/filter/private/EncoderQSV_p.h
@@ -24,19 +24,20 @@ namespace AVQt {
         IEncoder::CODEC m_codec{IEncoder::CODEC::H264};
         int m_bitrate{5 * 1024 * 1024};
 
-        AVRational m_framerate{0, 1}; // TODO: Remove d->m_framerate
+        AVRational m_framerate{0, 1};
+        int64_t m_duration{0};
         AVCodec *m_pCodec{nullptr};
         AVCodecContext *m_pCodecCtx{nullptr};
         AVBufferRef *m_pDeviceCtx{nullptr}, *m_pFramesCtx{nullptr};
         static constexpr auto HW_FRAME_POOL_SIZE = 4;
         AVFrame *m_pHWFrame{nullptr};
-        QMutex m_codecMutex{};
 
         QMutex m_inputQueueMutex{};
-        QQueue<QPair<AVFrame *, int64_t>> m_inputQueue{};
 
+        QQueue<QPair<AVFrame *, int64_t>> m_inputQueue{};
         IFrameSource *m_pLockedSource = nullptr;
         QMutex m_cbListMutex{};
+
         QList<IPacketSink *> m_cbList{};
 
         QMutex m_onFrameMutex{};
diff --git a/AVQt/output/OpenGLRenderer.cpp b/AVQt/output/OpenGLRenderer.cpp
index ef995e49d4b1c6f211f11daffb5ec5f9dfa0d913..0054225f6cfc60390d2568f574835377815714df 100644
--- a/AVQt/output/OpenGLRenderer.cpp
+++ b/AVQt/output/OpenGLRenderer.cpp
@@ -6,6 +6,7 @@
 #include "OpenGLRenderer.h"
 
 #include <QtGui>
+#include <QtConcurrent>
 #include <QImage>
 
 #include <va/va.h>
@@ -15,7 +16,6 @@
 #include <EGL/egl.h>
 #include <EGL/eglext.h>
 #include <GL/glu.h>
-#include <cstdio>
 #include <unistd.h>
 #include <iostream>
 
@@ -162,7 +162,8 @@ namespace AVQt {
                 QMutexLocker lock(&d->m_renderQueueMutex);
 
                 for (auto &e: d->m_renderQueue) {
-                    av_frame_unref(e.first);
+                    e.waitForFinished();
+                    av_frame_unref(e.result());
                 }
 
                 d->m_renderQueue.clear();
@@ -221,80 +222,69 @@ namespace AVQt {
 
         QMutexLocker onFrameLock{&d->m_onFrameMutex};
 
-        bool shouldBe = true;
+        QFuture<AVFrame *> queueFrame;
 
-        QPair<AVFrame *, int64_t> newFrame;
-
-        newFrame.first = av_frame_alloc();
 //        av_frame_ref(newFrame.first, frame);
         constexpr auto strBufSize = 64;
         char strBuf[strBufSize];
         qDebug("Pixel format: %s", av_get_pix_fmt_string(strBuf, 64, static_cast<AVPixelFormat>(frame->format)));
         switch (frame->format) {
-//            case AV_PIX_FMT_VAAPI:
-//            case AV_PIX_FMT_DRM_PRIME:
+            case AV_PIX_FMT_QSV:
+            case AV_PIX_FMT_CUDA:
+            case AV_PIX_FMT_VDPAU:
+            case AV_PIX_FMT_D3D11VA_VLD:
             case AV_PIX_FMT_DXVA2_VLD:
                 qDebug("Transferring frame from GPU to CPU");
-                av_hwframe_transfer_data(newFrame.first, frame, 0);
-                break;
-            case AV_PIX_FMT_QSV:
-                qDebug("[AVQt::OpenGLRenderer] Mapping QSV frame to VAAPI for rendering");
-#ifdef Q_OS_LINUX
-                if (!d->m_pQSVDerivedDeviceContext) {
-                    int ret = av_hwdevice_ctx_create(&d->m_pQSVDerivedDeviceContext, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr,
-                                                     0);
-                    if (ret != 0) {
-                        qFatal("[AVQt::OpenGLRenderer] %i: Could not create derived VAAPI context: %s", ret,
-                               av_make_error_string(strBuf, strBufSize, ret));
-                    }
-                    d->m_pQSVDerivedFramesContext = av_hwframe_ctx_alloc(d->m_pQSVDerivedDeviceContext);
-                    auto framesCtx = reinterpret_cast<AVHWFramesContext *>(d->m_pQSVDerivedFramesContext->data);
-                    auto framesCtxOld = reinterpret_cast<AVHWFramesContext *>(frame->hw_frames_ctx->data);
-                    framesCtx->sw_format = framesCtxOld->sw_format;
-                    framesCtx->format = AV_PIX_FMT_VAAPI;
-                    framesCtx->width = framesCtxOld->width;
-                    framesCtx->height = framesCtxOld->height;
-                    framesCtx->initial_pool_size = framesCtxOld->initial_pool_size;
-
-                    ret = av_hwframe_ctx_init(d->m_pQSVDerivedFramesContext);
-
-                    if (ret != 0) {
-                        qFatal("[AVQt::OpenGLRenderer] %i: Could not create derived frames context: %s", ret,
-                               av_make_error_string(strBuf, strBufSize, ret));
-                    }
-                }
-                newFrame.first->hw_frames_ctx = av_buffer_ref(d->m_pQSVDerivedFramesContext);
-                av_hwframe_map(newFrame.first, frame, AV_HWFRAME_MAP_READ);
-#else
-                qFatal("[AVQt::OpenGLRenderer] Mapping QSV frame to other than VAAPI is currently not supported");
-#endif
+                queueFrame = QtConcurrent::run([](AVFrame *input) {
+                    AVFrame *outFrame = av_frame_alloc();
+                    av_hwframe_transfer_data(outFrame, input, 0);
+                    outFrame->pts = input->pts;
+                    av_frame_free(&input);
+                    return outFrame;
+                }, av_frame_clone(frame));
                 break;
+//            case AV_PIX_FMT_QSV: {
+//                qDebug("[AVQt::OpenGLRenderer] Mapping QSV frame to CPU for rendering");
+//                queueFrame = QtConcurrent::run([d](AVFrame *input) {
+//                    AVFrame *outFrame = av_frame_alloc();
+//                    int ret = av_hwframe_map(outFrame, input, AV_HWFRAME_MAP_READ);
+//                    if (ret != 0) {
+//                        constexpr auto strBufSize = 64;
+//                        char strBuf[strBufSize];
+//                        qFatal("[AVQt::OpenGLRenderer] %d Could not map QSV frame to CPU: %s", ret,
+//                               av_make_error_string(strBuf, strBufSize, ret));
+//                    }
+//                    outFrame->pts = input->pts;
+//                    av_frame_free(&input);
+//                    return outFrame;
+//                }, av_frame_clone(frame));
+//                break;
+//            }
             default:
                 qDebug("Referencing frame");
-                av_frame_ref(newFrame.first, frame);
+                queueFrame = QtConcurrent::run([d](AVFrame *input, AVBufferRef *pDeviceCtx) {
+                    bool shouldBe = true;
+                    if (input->format == AV_PIX_FMT_VAAPI && d->m_firstFrame.compare_exchange_strong(shouldBe, false)) {
+                        d->m_pVAContext = static_cast<AVVAAPIDeviceContext *>(reinterpret_cast<AVHWDeviceContext *>(pDeviceCtx->data)->hwctx);
+                        d->m_VADisplay = d->m_pVAContext->display;
+                    }
+                    return input;
+                }, av_frame_clone(frame), pDeviceCtx);
                 break;
         }
 
-        if (d->m_firstFrame.compare_exchange_strong(shouldBe, false) && newFrame.first->format == AV_PIX_FMT_VAAPI) {
-            d->m_pVAContext = static_cast<AVVAAPIDeviceContext *>(reinterpret_cast<AVHWDeviceContext *>(pDeviceCtx->data)->hwctx);
-            d->m_VADisplay = d->m_pVAContext->display;
-            av_buffer_unref(&pDeviceCtx);
-        }
-
-        newFrame.first->pts = frame->pts;
-        newFrame.second = duration;
 //        av_frame_unref(frame);
 
 //        char strBuf[64];
         //qDebug() << "Pixel format:" << av_get_pix_fmt_string(strBuf, 64, static_cast<AVPixelFormat>(frame->format));
 
 
-        while (d->m_renderQueue.size() >= 4) {
+        while (d->m_renderQueue.size() > 6) {
             QThread::msleep(4);
         }
 
         QMutexLocker lock(&d->m_renderQueueMutex);
-        d->m_renderQueue.enqueue(newFrame);
+        d->m_renderQueue.enqueue(queueFrame);
     }
 
     void OpenGLRenderer::initializeGL() {
@@ -424,23 +414,22 @@ namespace AVQt {
                 }
                 if (!d->m_renderQueue.isEmpty()) {
                     auto timestamp = d->m_clock->getTimestamp();
-                    if (timestamp >= d->m_renderQueue.first().first->pts) {
+                    if (timestamp >= d->m_renderQueue.first().result()->pts) {
                         d->m_updateRequired.store(true);
                         d->m_updateTimestamp.store(timestamp);
                     }
                 }
                 if (d->m_updateRequired.load() && !d->m_renderQueue.isEmpty()) {
                     d->m_updateRequired.store(false);
-                    QPair<AVFrame *, int64_t> frame = d->m_renderQueue.dequeue();
+                    auto frame = d->m_renderQueue.dequeue().result();
                     while (!d->m_renderQueue.isEmpty()) {
-                        if (frame.first->pts <= d->m_updateTimestamp.load()) {
-                            if (d->m_renderQueue.first().first->pts >= d->m_updateTimestamp.load()) {
+                        if (frame->pts <= d->m_updateTimestamp.load()) {
+                            if (d->m_renderQueue.first().result()->pts >= d->m_updateTimestamp.load()) {
                                 break;
                             } else {
-                                qDebug("Discarding video frame at PTS: %lld < PTS: %lld", static_cast<long long>(frame.first->pts),
+                                qDebug("Discarding video frame at PTS: %lld < PTS: %lld", static_cast<long long>(frame->pts),
                                        d->m_updateTimestamp.load());
-                                av_frame_unref(frame.first);
-                                av_frame_free(&frame.first);
+                                av_frame_free(&frame);
                             }
                         }
                         QMutexLocker lock2(&d->m_renderQueueMutex);
@@ -454,7 +443,7 @@ namespace AVQt {
                         QMutexLocker lock(&d->m_currentFrameMutex);
                         firstFrame = !d->m_currentFrame;
                         if (!firstFrame) {
-                            if (d->m_currentFrame->format == frame.first->format) {
+                            if (d->m_currentFrame->format == frame->format) {
                                 differentPixFmt = false;
                             }
                         }
@@ -463,7 +452,7 @@ namespace AVQt {
                             av_frame_free(&d->m_currentFrame);
                         }
 
-                        d->m_currentFrame = frame.first;
+                        d->m_currentFrame = frame;
                     }
 
                     if (firstFrame) {
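
The OpenGLRenderer.cpp rework above replaces the QPair<AVFrame *, int64_t> render queue with QFuture<AVFrame *>: the GPU-to-CPU download runs on the global thread pool and paintGL() only blocks on result() when it actually needs the frame. A minimal sketch of that pattern, assuming Qt5 with the Concurrent module (the enqueueHwFrame/renderQueue names are placeholders, not the renderer's exact code):

    #include <QtConcurrent>
    #include <QFuture>
    #include <QQueue>
    extern "C" {
    #include <libavutil/frame.h>
    #include <libavutil/hwcontext.h>
    }

    static QQueue<QFuture<AVFrame *>> renderQueue; // consumer calls result(), which waits only if the download is still running

    static void enqueueHwFrame(AVFrame *frame) {
        // Clone so the worker owns its own reference; the caller may unref its frame immediately.
        QFuture<AVFrame *> pending = QtConcurrent::run([](AVFrame *input) {
            AVFrame *swFrame = av_frame_alloc();
            av_hwframe_transfer_data(swFrame, input, 0); // download the QSV/CUDA/VDPAU/... surface to system memory
            swFrame->pts = input->pts;                   // carry the timestamp over explicitly
            av_frame_free(&input);
            return swFrame;
        }, av_frame_clone(frame));
        renderQueue.enqueue(pending);
    }
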
diff --git a/AVQt/output/private/OpenGLRenderer_p.h b/AVQt/output/private/OpenGLRenderer_p.h
index 06d480c8b6e8bbde8bce7b2d3f67603250472b78..0e045224bea43b8d8a8fcfd53c352638403cba8e 100644
--- a/AVQt/output/private/OpenGLRenderer_p.h
+++ b/AVQt/output/private/OpenGLRenderer_p.h
@@ -57,7 +57,7 @@ namespace AVQt {
 
         QMutex m_onFrameMutex{};
         QMutex m_renderQueueMutex{};
-        QQueue<QPair<AVFrame *, int64_t>> m_renderQueue{};
+        QQueue<QFuture<AVFrame *>> m_renderQueue{};
 
         RenderClock *m_clock{nullptr};
         QTime m_duration{};
diff --git a/Player/CMakeLists.txt b/Player/CMakeLists.txt
index e3fa22b28c5b5cf8832722c111f540ad1d17695f..50cfd2797dfc1071257cf2b8f96cae064cb637c8 100644
--- a/Player/CMakeLists.txt
+++ b/Player/CMakeLists.txt
@@ -11,13 +11,15 @@ find_package(OpenAL)
 #add_compile_options(-fsanitize=address)
 #add_link_options(-fsanitize=address)
 
+add_compile_definitions(ENABLE_QSV_ENCODE)
+
 add_executable(Player main.cpp)
 
-#target_link_libraries(Player Qt6::Core Qt6::Gui Qt6::Concurrent Qt6::Widgets Qt6::OpenGLWidgets Qt6::OpenGL avformat avfilter avutil avcodec avdevice swscale swresample AVQt)
-target_link_libraries(Player Qt5::Core Qt5::Gui Qt5::Concurrent Qt5::Widgets Qt5::OpenGL avformat avfilter avutil avcodec avdevice swscale swresample AVQt)
+#target_link_libraries(Player Qt6::Core Qt6::Gui Qt6::Concurrent Qt6::Widgets Qt6::OpenGLWidgets Qt6::OpenGL avformat avfilter avutil avcodec avdevice swscale swresample AVQtStatic)
+target_link_libraries(Player Qt5::Core Qt5::Gui Qt5::Concurrent Qt5::Widgets Qt5::OpenGL avformat avfilter avutil avcodec avdevice swscale swresample AVQtStatic)
 
 if (LINUX)
-    target_link_libraries(Player openal GL EGL GLU)
+    target_link_libraries(Player openal GL EGL GLU va)
 elseif (WIN32)
     target_link_libraries(Player OpenAL32 opengl32)
 endif ()
\ No newline at end of file
diff --git a/Player/main.cpp b/Player/main.cpp
index e9c2d8e8fc8c44ebc4879930fb0fae2f87d57b33..a10705fc30a5938e800d42a627d48686c4c5116f 100644
--- a/Player/main.cpp
+++ b/Player/main.cpp
@@ -83,8 +83,9 @@ int main(int argc, char *argv[]) {
     AVQt::OpenGLRenderer renderer;
 
     demuxer.registerCallback(videoDecoder, AVQt::IPacketSource::CB_VIDEO);
-//    videoDecoder->registerCallback(videoEncoder);
-
+#ifdef ENABLE_QSV_ENCODE
+    videoDecoder->registerCallback(videoEncoder);
+#endif
     QFile outputFile("output.mp4");
     outputFile.open(QIODevice::ReadWrite | QIODevice::Truncate);
     outputFile.seek(0);
@@ -110,6 +111,7 @@ int main(int argc, char *argv[]) {
 
     QObject::connect(app, &QApplication::aboutToQuit, [&] {
         demuxer.deinit();
+        muxer.deinit(videoEncoder);
         delete videoEncoder;
         delete videoDecoder;
     });