diff --git a/AVQt/AVQt b/AVQt/AVQt
index 7e31b527e2422a529bd1514167be7a47355ad16a..3acd66f277e2bb6201c41f916dc7d9ea09cce65c 100644
--- a/AVQt/AVQt
+++ b/AVQt/AVQt
@@ -14,4 +14,5 @@
 #include "output/IAudioSink.h"
 #include "output/OpenGLRenderer.h"
 #include "output/OpenALAudioOutput.h"
+#include "output/Muxer.h"
 //#include "output/FrameFileSaver.h"
\ No newline at end of file
diff --git a/AVQt/CMakeLists.txt b/AVQt/CMakeLists.txt
index 436bf3967c1ccb18c9ba68c3293332a5cb754bd2..1f4c491de4e00118a034c538b15fb4ba169a5cf8 100644
--- a/AVQt/CMakeLists.txt
+++ b/AVQt/CMakeLists.txt
@@ -12,6 +12,9 @@ set(CMAKE_INCLUDE_CURRENT_DIR on)
 #find_package(Qt6 COMPONENTS Core Gui Concurrent Widgets OpenGL OpenGLWidgets)
 find_package(Qt5 COMPONENTS Core Gui Concurrent Widgets OpenGL)
 
+#add_compile_options(-fsanitize=address)
+#add_link_options(-fsanitize=address)
+
 #if (${CMAKE_BUILD_TYPE} EQUAL "Release")
 #    -O3)
 #endif ()
@@ -74,6 +77,10 @@ set(SOURCES
         output/OpenALAudioOutput.h
         output/private/OpenALAudioOutput_p.h
         output/OpenALAudioOutput.cpp
+
+        output/Muxer.h
+        output/private/Muxer_p.h
+        output/Muxer.cpp
         )
 
 add_library(AVQt SHARED ${SOURCES})
@@ -179,6 +186,6 @@ if (WIN32)
     target_link_libraries(AVQt opengl32 OpenAL32)
     target_link_libraries(AVQtStatic opengl32 OpenAL32)
 else ()
-    target_link_libraries(AVQt GL openal)
-    target_link_libraries(AVQtStatic GL openal)
+    target_link_libraries(AVQt GL openal EGL GLU)
+    target_link_libraries(AVQtStatic GL openal EGL GLU)
 endif ()
\ No newline at end of file
diff --git a/AVQt/filter/DecoderDXVA2.cpp b/AVQt/filter/DecoderDXVA2.cpp
index 41c5c294f37b2a6558dab609c3b126fea351c6d1..6e2219d47cdda936ab156767ce646b4ddd35a3a4 100644
--- a/AVQt/filter/DecoderDXVA2.cpp
+++ b/AVQt/filter/DecoderDXVA2.cpp
@@ -287,7 +287,7 @@ namespace AVQt {
                                d->m_timebase.num,
                                d->m_timebase.den);
                         QTime time = QTime::currentTime();
-                        cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0));
+                        cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0), d->m_pDeviceCtx);
                         qDebug() << "Video CB time:" << time.msecsTo(QTime::currentTime());
                         av_frame_unref(cbFrame);
                         av_frame_free(&cbFrame);
diff --git a/AVQt/filter/DecoderMMAL.cpp b/AVQt/filter/DecoderMMAL.cpp
index 2ddf4d6df1a95f9e61e51386d57ae98897e5a936..7a84383b65c4e5e663fae26782e4d1981f026ee6 100644
--- a/AVQt/filter/DecoderMMAL.cpp
+++ b/AVQt/filter/DecoderMMAL.cpp
@@ -292,7 +292,7 @@ namespace AVQt {
                                d->m_timebase.num,
                                d->m_timebase.den);
                         QTime time = QTime::currentTime();
-                        cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0));
+                        cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0), nullptr);
                         qDebug() << "Video CB time:" << time.msecsTo(QTime::currentTime());
                         av_frame_unref(cbFrame);
                         av_frame_free(&cbFrame);
diff --git a/AVQt/filter/DecoderQSV.cpp b/AVQt/filter/DecoderQSV.cpp
index 464d43c5ed3d288e1e52208cddb857c708fc65bc..fd867e3f342692a938ff7fed4cf9dc133a26b509 100644
--- a/AVQt/filter/DecoderQSV.cpp
+++ b/AVQt/filter/DecoderQSV.cpp
@@ -211,9 +211,37 @@ namespace AVQt {
                 char strBuf[strBufSize];
                 // If m_pCodecParams is nullptr, it is not initialized by packet source, if video codec context is nullptr, this is the first packet
                 if (d->m_pCodecParams && !d->m_pCodecCtx) {
-                    d->m_pCodec = avcodec_find_decoder(d->m_pCodecParams->codec_id);
+                    switch (d->m_pCodecParams->codec_id) {
+                        case AV_CODEC_ID_H264:
+                            d->m_pCodec = avcodec_find_decoder_by_name("h264_qsv");
+                            break;
+                        case AV_CODEC_ID_HEVC:
+                            d->m_pCodec = avcodec_find_decoder_by_name("hevc_qsv");
+                            break;
+                        case AV_CODEC_ID_VP8:
+                            d->m_pCodec = avcodec_find_decoder_by_name("vp8_qsv");
+                            break;
+                        case AV_CODEC_ID_VP9:
+                            d->m_pCodec = avcodec_find_decoder_by_name("vp9_qsv");
+                            break;
+                        case AV_CODEC_ID_VC1:
+                            d->m_pCodec = avcodec_find_decoder_by_name("vc1_qsv");
+                            break;
+                        case AV_CODEC_ID_AV1:
+                            d->m_pCodec = avcodec_find_decoder_by_name("av1_qsv");
+                            break;
+                        case AV_CODEC_ID_MPEG2VIDEO:
+                            d->m_pCodec = avcodec_find_decoder_by_name("mpeg2_qsv");
+                            break;
+                        case AV_CODEC_ID_MJPEG:
+                            d->m_pCodec = avcodec_find_decoder_by_name("mjpeg_qsv");
+                            break;
+                        default:
+                            qFatal("[AVQt::DecoderQSV] Unsupported codec: %s", avcodec_get_name(d->m_pCodecParams->codec_id));
+                    }
+
                     if (!d->m_pCodec) {
-                        qFatal("No audio decoder found");
+                        qFatal("No video decoder found");
                     }
 
                     d->m_pCodecCtx = avcodec_alloc_context3(d->m_pCodec);
@@ -228,6 +256,7 @@ namespace AVQt {
 
                     avcodec_parameters_to_context(d->m_pCodecCtx, d->m_pCodecParams);
                     d->m_pCodecCtx->hw_device_ctx = av_buffer_ref(d->m_pDeviceCtx);
+                    d->m_pCodecCtx->get_format = &DecoderQSVPrivate::getFormat;
                     ret = avcodec_open2(d->m_pCodecCtx, d->m_pCodec, nullptr);
                     if (ret != 0) {
                         qFatal("%d: Could not open QSV decoder: %s", ret, av_make_error_string(strBuf, strBufSize, ret));
@@ -287,7 +316,8 @@ namespace AVQt {
                                d->m_timebase.num,
                                d->m_timebase.den);
                         QTime time = QTime::currentTime();
-                        cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0));
+                        cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0),
+                                    av_buffer_ref(d->m_pDeviceCtx));
                         qDebug() << "Video CB time:" << time.msecsTo(QTime::currentTime());
                         av_frame_unref(cbFrame);
                         av_frame_free(&cbFrame);
@@ -316,4 +346,44 @@ namespace AVQt {
             }
         }
     }
+
+#include <libavutil/hwcontext_qsv.h>
+
+    AVPixelFormat DecoderQSVPrivate::getFormat(AVCodecContext *pCodecCtx, const enum AVPixelFormat *pix_fmts) {
+        while (*pix_fmts != AV_PIX_FMT_NONE) {
+            if (*pix_fmts == AV_PIX_FMT_QSV) {
+                AVHWFramesContext *frames_ctx;
+                AVQSVFramesContext *frames_hwctx;
+                int ret;
+
+                /* create a pool of surfaces to be used by the decoder */
+                pCodecCtx->hw_frames_ctx = av_hwframe_ctx_alloc(pCodecCtx->hw_device_ctx);
+                if (!pCodecCtx->hw_frames_ctx) {
+                    return AV_PIX_FMT_NONE;
+                }
+                frames_ctx = (AVHWFramesContext *) pCodecCtx->hw_frames_ctx->data;
+                frames_hwctx = static_cast<AVQSVFramesContext *>(frames_ctx->hwctx);
+
+                frames_ctx->format = AV_PIX_FMT_QSV;
+                frames_ctx->sw_format = pCodecCtx->sw_pix_fmt;
+                frames_ctx->width = FFALIGN(pCodecCtx->coded_width, 32);
+                frames_ctx->height = FFALIGN(pCodecCtx->coded_height, 32);
+                frames_ctx->initial_pool_size = 32;
+
+                frames_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
+
+                ret = av_hwframe_ctx_init(pCodecCtx->hw_frames_ctx);
+                if (ret < 0)
+                    return AV_PIX_FMT_NONE;
+
+                return AV_PIX_FMT_QSV;
+            }
+
+            pix_fmts++;
+        }
+
+        fprintf(stderr, "The QSV pixel format was not offered in get_format()\n");
+
+        return AV_PIX_FMT_NONE;
+    }
 }
\ No newline at end of file
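
The switch above selects the QSV decoder by name because avcodec_find_decoder() would return the software implementation for the same codec ID. A minimal sketch of the same mapping as a lookup table (assuming an FFmpeg build with the QSV decoders enabled; the helper name findQsvDecoder is illustrative only):

    extern "C" {
    #include <libavcodec/avcodec.h>
    }
    #include <unordered_map>

    // Codec ID -> QSV decoder name, equivalent to the switch statement above
    static AVCodec *findQsvDecoder(AVCodecID id) {
        static const std::unordered_map<AVCodecID, const char *> qsvNames{
                {AV_CODEC_ID_H264, "h264_qsv"},         {AV_CODEC_ID_HEVC, "hevc_qsv"},
                {AV_CODEC_ID_VP8, "vp8_qsv"},           {AV_CODEC_ID_VP9, "vp9_qsv"},
                {AV_CODEC_ID_VC1, "vc1_qsv"},           {AV_CODEC_ID_AV1, "av1_qsv"},
                {AV_CODEC_ID_MPEG2VIDEO, "mpeg2_qsv"},  {AV_CODEC_ID_MJPEG, "mjpeg_qsv"},
        };
        auto entry = qsvNames.find(id);
        return entry != qsvNames.end() ? avcodec_find_decoder_by_name(entry->second) : nullptr;
    }
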
diff --git a/AVQt/filter/DecoderQSV.h b/AVQt/filter/DecoderQSV.h
index 3d740cf8066c269aef0c21ebf3a2a036359ddd4a..43cf1c454a99f8ee5d5d4b2565700bdb4770da73 100644
--- a/AVQt/filter/DecoderQSV.h
+++ b/AVQt/filter/DecoderQSV.h
@@ -2,11 +2,12 @@
 // Created by silas on 3/1/21.
 //
 
-#include "IDecoder.h"
 
 #include <QtCore>
+#include "IDecoder.h"
 #include <QtGui>
 
+
 extern "C" {
 #include <libavutil/avutil.h>
 #include <libavutil/hwcontext.h>
@@ -169,7 +170,6 @@ namespace AVQt {
          */
         DecoderQSVPrivate *d_ptr;
     };
-
 }
 
 #endif //TRANSCODE_DECODERQSV_H
\ No newline at end of file
diff --git a/AVQt/filter/DecoderVAAPI.cpp b/AVQt/filter/DecoderVAAPI.cpp
index d61dcecebc86002e501cdd314dbc8c3e104ba7d5..6aa87df86196d0727d5a83f2e36d5ac7c5e29092 100644
--- a/AVQt/filter/DecoderVAAPI.cpp
+++ b/AVQt/filter/DecoderVAAPI.cpp
@@ -220,12 +220,12 @@ namespace AVQt {
                 if (d->m_pCodecParams && !d->m_pCodecCtx) {
                     d->m_pCodec = avcodec_find_decoder(d->m_pCodecParams->codec_id);
                     if (!d->m_pCodec) {
-                        qFatal("No audio decoder found");
+                        qFatal("No video decoder found");
                     }
 
                     d->m_pCodecCtx = avcodec_alloc_context3(d->m_pCodec);
                     if (!d->m_pCodecCtx) {
-                        qFatal("Could not allocate audio decoder context, probably out of memory");
+                        qFatal("Could not allocate video decoder context, probably out of memory");
                     }
 
                     ret = av_hwdevice_ctx_create(&d->m_pDeviceCtx, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr, 0);
@@ -262,18 +262,22 @@ namespace AVQt {
                         } else if (ret < 0) {
                             qFatal("%d: Error sending packet to VAAPI decoder: %s", ret, av_make_error_string(strBuf, strBufSize, ret));
                         }
-                        av_packet_unref(packet);
                         av_packet_free(&packet);
                         lock.relock();
                     }
                 }
-                AVFrame *frame = av_frame_alloc();
                 while (true) {
+                    AVFrame *frame = av_frame_alloc();
                     ret = avcodec_receive_frame(d->m_pCodecCtx, frame);
                     if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
                         break;
+                    } else if (ret == AVERROR(ENOMEM)) { // -12: out of memory, e.g. no free hardware surfaces yet
+                        av_frame_free(&frame);
+                        msleep(1);
+                        continue;
                     } else if (ret < 0) {
-                        qFatal("%d: Error receiving frame from VAAPI decoder: %s", ret, av_make_error_string(strBuf, strBufSize, ret));
+                        qFatal("%d: Error receiving frame %d from VAAPI decoder: %s", ret, d->m_pCodecCtx->frame_number,
+                               av_make_error_string(strBuf, strBufSize, ret));
                     }
 
 //                    auto t1 = NOW();
@@ -295,9 +299,8 @@ namespace AVQt {
                                d->m_timebase.num,
                                d->m_timebase.den);
                         QTime time = QTime::currentTime();
-                        cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0));
+                        cb->onFrame(this, cbFrame, static_cast<int64_t>(av_q2d(av_inv_q(d->m_framerate)) * 1000.0), d->m_pDeviceCtx);
                         qDebug() << "Video CB time:" << time.msecsTo(QTime::currentTime());
-                        av_frame_unref(cbFrame);
                         av_frame_free(&cbFrame);
 //                        }));
                     }
@@ -316,9 +319,9 @@ namespace AVQt {
 //                    qDebug("Decoder frame transfer time: %ld us", TIME_US(t1, t3));
 
 //                    av_frame_free(&swFrame);
-                    av_frame_unref(frame);
+                    av_frame_free(&frame);
                 }
-                av_frame_free(&frame);
+//                av_frame_free(&frame);
             } else {
                 msleep(4);
             }
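
Allocating a fresh AVFrame per iteration and treating AVERROR(ENOMEM) (the -12 checked above) as a transient condition lets the VAAPI receive loop wait for a hardware surface to be released instead of aborting. A minimal sketch of that loop under the same assumptions (an opened codecCtx, error handling reduced to qFatal):

    #include <QThread>

    extern "C" {
    #include <libavcodec/avcodec.h>
    }

    static void drainDecoder(AVCodecContext *codecCtx) {
        while (true) {
            AVFrame *frame = av_frame_alloc();
            int ret = avcodec_receive_frame(codecCtx, frame);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                av_frame_free(&frame);      // nothing was decoded into it
                break;                      // decoder needs more input / stream ended
            } else if (ret == AVERROR(ENOMEM)) {
                av_frame_free(&frame);
                QThread::msleep(1);         // wait until a hardware surface is free again
                continue;
            } else if (ret < 0) {
                qFatal("Error receiving frame: %d", ret);
            }
            // ... clone/ref the frame for the registered sinks ...
            av_frame_free(&frame);          // releases the surface reference
        }
    }
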
diff --git a/AVQt/filter/EncoderVAAPI.cpp b/AVQt/filter/EncoderVAAPI.cpp
index e94235e68346c30b72239c85aa070701531039a6..f1f10f0fe99a8755e057c6d689c3b7f9a1543ac8 100644
--- a/AVQt/filter/EncoderVAAPI.cpp
+++ b/AVQt/filter/EncoderVAAPI.cpp
@@ -8,10 +8,11 @@
 #include "output/IPacketSink.h"
 
 namespace AVQt {
-    EncoderVAAPI::EncoderVAAPI(QString encoder, QObject *parent) : QThread(parent), d_ptr(new EncoderVAAPIPrivate(this)) {
+    EncoderVAAPI::EncoderVAAPI(CODEC codec, int bitrate, QObject *parent) : QThread(parent), d_ptr(new EncoderVAAPIPrivate(this)) {
         Q_D(AVQt::EncoderVAAPI);
 
-        d->m_encoder = std::move(encoder);
+        d->m_codec = codec;
+        d->m_bitrate = bitrate;
     }
 
     [[maybe_unused]] EncoderVAAPI::EncoderVAAPI(EncoderVAAPIPrivate &p) : d_ptr(&p) {
@@ -34,9 +35,34 @@ namespace AVQt {
 //        constexpr auto strBufSize = 64;
 //        char strBuf[strBufSize];
 
-        d->m_pCodec = avcodec_find_encoder_by_name(d->m_encoder.toLocal8Bit().constData());
+        std::string codec_name;
+        switch (d->m_codec) {
+            case CODEC::H264:
+                codec_name = "h264_vaapi";
+                break;
+            case CODEC::HEVC:
+                codec_name = "hevc_vaapi";
+                break;
+            case CODEC::VP9:
+                if (qEnvironmentVariable("LIBVA_DRIVER_NAME") == "iHD") {
+                    qFatal("[AVQt::EncoderVAAPI] Unsupported codec: VP9");
+                } else {
+                    codec_name = "vp9_vaapi";
+                }
+                break;
+            case CODEC::VP8:
+                codec_name = "vp8_vaapi";
+                break;
+            case CODEC::MPEG2:
+                codec_name = "mpeg2_vaapi";
+                break;
+            case CODEC::AV1:
+                qFatal("[AVQt::EncoderVAAPI] Unsupported codec: AV1");
+        }
+
+        d->m_pCodec = avcodec_find_encoder_by_name(codec_name.c_str());
         if (!d->m_pCodec) {
-            qFatal("Could not find encoder: %s", d->m_encoder.toLocal8Bit().constData());
+            qFatal("Could not find encoder: %s", codec_name.c_str());
         }
 
         return 0;
@@ -136,8 +162,8 @@ namespace AVQt {
     int EncoderVAAPI::init(IFrameSource *source, AVRational framerate, int64_t duration) {
         Q_UNUSED(source)
         Q_UNUSED(duration)
+        Q_UNUSED(framerate)
         Q_D(AVQt::EncoderVAAPI);
-        d->m_framerate = framerate;
         init();
         return 0;
     }
@@ -193,7 +219,7 @@ namespace AVQt {
         }
     }
 
-    void EncoderVAAPI::onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration) {
+    void EncoderVAAPI::onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration, AVBufferRef *pDeviceCtx) {
         Q_UNUSED(source)
         Q_D(AVQt::EncoderVAAPI);
 
@@ -212,7 +238,7 @@ namespace AVQt {
                 break;
         }
 
-        while (d->m_inputQueue.size() > 100) {
+        while (d->m_inputQueue.size() > 4) {
             QThread::msleep(1);
         }
         {
@@ -276,10 +302,10 @@ namespace AVQt {
 
                     d->m_pCodecCtx->hw_device_ctx = av_buffer_ref(d->m_pDeviceCtx);
                     d->m_pCodecCtx->hw_frames_ctx = av_buffer_ref(d->m_pFramesCtx);
-//                    d->m_pCodecCtx->bit_rate = 5000000;
-//                    d->m_pCodecCtx->rc_min_rate = 4500000;
-//                    d->m_pCodecCtx->rc_max_rate = 6000000;
-//                    d->m_pCodecCtx->rc_buffer_size = 10000000;
+                    d->m_pCodecCtx->bit_rate = d->m_bitrate;
+                    d->m_pCodecCtx->rc_min_rate = static_cast<int>(std::round(d->m_bitrate * 0.8));
+                    d->m_pCodecCtx->rc_max_rate = static_cast<int>(std::round(d->m_bitrate * 1.1));
+                    d->m_pCodecCtx->rc_buffer_size = d->m_bitrate * 2;
                     d->m_pCodecCtx->gop_size = 20;
                     d->m_pCodecCtx->max_b_frames = 0;
                     d->m_pCodecCtx->color_primaries = AVCOL_PRI_BT2020;
@@ -306,7 +332,7 @@ namespace AVQt {
                     for (const auto &cb: d->m_cbList) {
                         AVCodecParameters *parameters = avcodec_parameters_alloc();
                         avcodec_parameters_from_context(parameters, d->m_pCodecCtx);
-                        cb->init(this, d->m_framerate, d->m_pCodecCtx->time_base, 0, parameters, nullptr, nullptr);
+                        cb->init(this, av_make_q(0, 1), d->m_pCodecCtx->time_base, 0, parameters, nullptr, nullptr);
                         cb->start(this);
                     }
                 }
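
Deriving the rate-control window from the single bitrate argument replaces the previously hard-coded, commented-out constants. The same setup as a standalone helper (a sketch; the 0.8/1.1 factors mirror the hunk above, bitrate is in bit/s):

    #include <cmath>

    extern "C" {
    #include <libavcodec/avcodec.h>
    }

    static void applyRateControl(AVCodecContext *ctx, int bitrate) {
        ctx->bit_rate       = bitrate;
        ctx->rc_min_rate    = static_cast<int>(std::round(bitrate * 0.8)); // tolerate 20 % undershoot
        ctx->rc_max_rate    = static_cast<int>(std::round(bitrate * 1.1)); // tolerate 10 % overshoot
        ctx->rc_buffer_size = bitrate * 2;                                 // about 2 s of VBV buffer at the target rate
    }
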
diff --git a/AVQt/filter/EncoderVAAPI.h b/AVQt/filter/EncoderVAAPI.h
index 617160defbe3ee073a00125e38b0b4cc4d8088e2..6de818797b378882e5f8a71e199168f9622bfd41 100644
--- a/AVQt/filter/EncoderVAAPI.h
+++ b/AVQt/filter/EncoderVAAPI.h
@@ -32,7 +32,7 @@ namespace AVQt {
         Q_INTERFACES(AVQt::IEncoder)
 
     public:
-        explicit EncoderVAAPI(QString encoder, QObject *parent = nullptr);
+        explicit EncoderVAAPI(CODEC codec, int bitrate, QObject *parent = nullptr);
 
         EncoderVAAPI(EncoderVAAPI &&other) noexcept;
 
@@ -74,7 +74,7 @@ namespace AVQt {
 
         Q_INVOKABLE void pause(IFrameSource *source, bool paused) Q_DECL_OVERRIDE;
 
-        Q_INVOKABLE void onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration) Q_DECL_OVERRIDE;
+        Q_INVOKABLE void onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration, AVBufferRef *pDeviceCtx) Q_DECL_OVERRIDE;
 
     signals:
 
diff --git a/AVQt/filter/IEncoder.h b/AVQt/filter/IEncoder.h
index 0554f17f0783899c26f1420fff0cae824e2b1e66..9c41a03269f7cced3df6a160bdc39317926f2039 100644
--- a/AVQt/filter/IEncoder.h
+++ b/AVQt/filter/IEncoder.h
@@ -15,6 +15,10 @@ namespace AVQt {
         Q_INTERFACES(AVQt::IPacketSource)
 
     public:
+        enum class CODEC {
+            H264, HEVC, VP9, VP8, MPEG2, AV1
+        };
+
         virtual ~IEncoder() = default;
 
         virtual bool isPaused() = 0;
@@ -31,7 +35,7 @@ namespace AVQt {
 
         Q_INVOKABLE virtual void pause(IFrameSource *source, bool pause) = 0;
 
-        Q_INVOKABLE virtual void onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration) = 0;
+        Q_INVOKABLE virtual void onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration, AVBufferRef *pDeviceCtx) = 0;
 
 
         // IPacketSource
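
With the CODEC enum on the interface, callers select the codec instead of passing a driver-specific encoder name string. A hypothetical construction call under the new EncoderVAAPI signature (values are examples only):

    #include "filter/EncoderVAAPI.h"

    // 8 Mbit/s H.264 via VAAPI; CODEC comes from the IEncoder interface extended above
    auto *encoder = new AVQt::EncoderVAAPI(AVQt::IEncoder::CODEC::H264, 8 * 1024 * 1024);
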
diff --git a/AVQt/filter/private/DecoderQSV_p.h b/AVQt/filter/private/DecoderQSV_p.h
index ef82bc0524eda150b17de6eb0fc7d0a05b6761be..4a16439e3c6a45e01ecbede3f73f16bc806733ac 100644
--- a/AVQt/filter/private/DecoderQSV_p.h
+++ b/AVQt/filter/private/DecoderQSV_p.h
@@ -47,6 +47,8 @@ namespace AVQt {
         std::atomic_bool m_running{false};
         std::atomic_bool m_paused{false};
 
+        static AVPixelFormat getFormat(AVCodecContext *pCodecCtx, const enum AVPixelFormat *pix_fmts);
+
         friend class DecoderQSV;
     };
 }
diff --git a/AVQt/filter/private/EncoderVAAPI_p.h b/AVQt/filter/private/EncoderVAAPI_p.h
index 239bd63c8fde9d4705cdf6cfab0dc1cf98660e51..f310119b1df5cfde8b828a3875581e512de80ce0 100644
--- a/AVQt/filter/private/EncoderVAAPI_p.h
+++ b/AVQt/filter/private/EncoderVAAPI_p.h
@@ -21,9 +21,9 @@ namespace AVQt {
 
         EncoderVAAPI *q_ptr;
 
-        QString m_encoder{""};
+        IEncoder::CODEC m_codec{IEncoder::CODEC::H264};
+        int m_bitrate{5 * 1024 * 1024};
 
-        AVRational m_framerate{0, 1};
         AVCodec *m_pCodec{nullptr};
         AVCodecContext *m_pCodecCtx{nullptr};
         AVBufferRef *m_pDeviceCtx{nullptr}, *m_pFramesCtx{nullptr};
diff --git a/AVQt/input/Demuxer.cpp b/AVQt/input/Demuxer.cpp
index fe24e6fe396b7a8038b3aa6b01b432fc4bacb8dc..7b76a15080632b7d6fe091137cf3c6ed81878000 100644
--- a/AVQt/input/Demuxer.cpp
+++ b/AVQt/input/Demuxer.cpp
@@ -294,7 +294,7 @@ namespace AVQt {
                 for (const auto &cb: cbList) {
                     AVPacket *cbPacket = av_packet_clone(packet);
                     cb->onPacket(this, cbPacket, type);
-                    av_packet_unref(cbPacket);
+                    av_packet_free(&cbPacket);
                 }
                 if (elapsedTimer.hasExpired(500)) {
                     QByteArray aP, vP, sP, aR, vR, sR;
@@ -316,10 +316,12 @@ namespace AVQt {
                     qDebug();
                     elapsedTimer.restart();
                 }
+                av_packet_unref(packet);
             } else {
                 msleep(8);
             }
         }
+        av_packet_free(&packet);
     }
 
     Demuxer::Demuxer(Demuxer &&other) noexcept: d_ptr(other.d_ptr) {
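
The demuxer now gives every sink its own clone and frees it right after the callback, while the packet used for av_read_frame() is only unreffed per iteration and freed once the loop ends. The ownership pattern in isolation (fmtCtx, callbacks, demuxer and type stand in for the demuxer's members):

    AVPacket *packet = av_packet_alloc();
    while (av_read_frame(fmtCtx, packet) >= 0) {
        for (const auto &cb : callbacks) {
            AVPacket *cbPacket = av_packet_clone(packet);  // new reference with copied properties
            cb->onPacket(demuxer, cbPacket, type);
            av_packet_free(&cbPacket);                     // releases the clone and its reference
        }
        av_packet_unref(packet);                           // reuse the same AVPacket for the next read
    }
    av_packet_free(&packet);                               // release the packet itself when the loop ends
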
diff --git a/AVQt/output/IFrameSink.h b/AVQt/output/IFrameSink.h
index 7f36132bb8714a93f67b2cbd2a9fa8cc810b60c0..bb818f88d548e9afe7ec7535be3004d949a65c79 100644
--- a/AVQt/output/IFrameSink.h
+++ b/AVQt/output/IFrameSink.h
@@ -9,6 +9,7 @@
 
 struct AVFrame;
 struct AVRational;
+struct AVBufferRef;
 
 namespace AVQt {
     /*!
@@ -89,7 +90,7 @@ namespace AVQt {
          * @param framerate Source stream framerate
          * @param duration Source frame presentation duration (inverse of framerate)
          */
-        Q_INVOKABLE virtual void onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration) = 0;
+        Q_INVOKABLE virtual void onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration, AVBufferRef *pDeviceCtx) = 0;
 
     signals:
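
Every IFrameSink now receives the decoder's hardware device context alongside the frame. The new pDeviceCtx parameter is not yet covered by the doxygen block above; it may be nullptr for software decoders (DecoderMMAL passes nullptr), and a sink that wants to keep it must take its own reference. A hypothetical implementation (MySink, m_pDeviceCtx and enqueue() are placeholders):

    void MySink::onFrame(AVQt::IFrameSource *source, AVFrame *frame, int64_t frameDuration, AVBufferRef *pDeviceCtx) {
        Q_UNUSED(source)
        Q_UNUSED(frameDuration)
        if (pDeviceCtx && !m_pDeviceCtx) {
            m_pDeviceCtx = av_buffer_ref(pDeviceCtx);  // own reference, released again in deinit()
        }
        AVFrame *queued = av_frame_alloc();
        av_frame_ref(queued, frame);                   // the source releases its own copy after the callback
        enqueue(queued);                               // hand off to the sink's render/encode queue
    }
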
 
diff --git a/AVQt/output/Muxer.cpp b/AVQt/output/Muxer.cpp
index 6f6788730b8b9ed4a43c5dff6f7a5ce48ec9fa53..d612c3110cfd16c10484e4d25035ca6cfa347cb1 100644
--- a/AVQt/output/Muxer.cpp
+++ b/AVQt/output/Muxer.cpp
@@ -6,9 +6,10 @@
 #include "Muxer.h"
 
 namespace AVQt {
-    Muxer::Muxer(QIODevice *outputDevice, QObject *parent) : QThread(parent), d_ptr(new MuxerPrivate(this)) {
+    Muxer::Muxer(QIODevice *outputDevice, FORMAT format, QObject *parent) : QThread(parent), d_ptr(new MuxerPrivate(this)) {
         Q_D(AVQt::Muxer);
         d->m_outputDevice = outputDevice;
+        d->m_format = format;
     }
 
     Muxer::Muxer(AVQt::MuxerPrivate &p) : d_ptr(&p) {
@@ -59,8 +60,37 @@ namespace AVQt {
             } else if (!d->m_outputDevice->isWritable()) {
                 qFatal("[AVQt::Muxer] Output device is not writable");
             }
+
+            QString outputFormat;
+            switch (d->m_format) {
+                case FORMAT::MP4:
+                    if (d->m_outputDevice->isSequential()) {
+                        qFatal("[AVQt::Muxer] MP4 output format is not available on sequential output devices like sockets");
+                    }
+                    outputFormat = "mp4";
+                    break;
+                case FORMAT::MOV:
+                    if (d->m_outputDevice->isSequential()) {
+                        qFatal("[AVQt::Muxer] MOV output format is not available on sequential output devices like sockets");
+                    }
+                    outputFormat = "mov";
+                    break;
+                case FORMAT::MKV:
+                    outputFormat = "matroska";
+                    break;
+                case FORMAT::WEBM:
+                    outputFormat = "webm";
+                    break;
+                case FORMAT::MPEGTS:
+                    outputFormat = "mpegts";
+                    break;
+                case FORMAT::INVALID:
+                    qFatal("[AVQt::Muxer] FORMAT::INVALID is just a placeholder, don't pass it as an argument");
+                    break;
+            }
+
             d->m_pFormatContext = avformat_alloc_context();
-            d->m_pFormatContext->oformat = av_guess_format("mp4", "", nullptr);
+            d->m_pFormatContext->oformat = av_guess_format(outputFormat.toLocal8Bit().data(), "", nullptr);
             d->m_pIOBuffer = static_cast<uint8_t *>(av_malloc(MuxerPrivate::IOBUF_SIZE));
             d->m_pIOContext = avio_alloc_context(d->m_pIOBuffer, MuxerPrivate::IOBUF_SIZE, 1, d->m_outputDevice, nullptr,
                                                  &MuxerPrivate::writeToIO, &MuxerPrivate::seekIO);
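
The muxer maps the new FORMAT enum onto av_guess_format() while still writing through a custom AVIOContext into the QIODevice, which is why the seek-dependent MP4/MOV formats are rejected up front for sequential devices. A sketch of a write callback in the shape avio_alloc_context() expects, assuming opaque is the QIODevice passed above (the real MuxerPrivate::writeToIO is not part of this hunk):

    #include <QIODevice>

    extern "C" {
    #include <libavformat/avio.h>
    #include <libavutil/error.h>
    }

    static int writeToIO(void *opaque, uint8_t *buf, int bufSize) {
        auto *device = reinterpret_cast<QIODevice *>(opaque);
        qint64 written = device->write(reinterpret_cast<const char *>(buf), bufSize);
        return written < 0 ? AVERROR_UNKNOWN : static_cast<int>(written);
    }
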
diff --git a/AVQt/output/Muxer.h b/AVQt/output/Muxer.h
index 09d76da55fde2dd1b5408aeea2b2dc15d3349f67..df439442ed93bedf04d0a0baf9778c7325ef3f5f 100644
--- a/AVQt/output/Muxer.h
+++ b/AVQt/output/Muxer.h
@@ -19,11 +19,12 @@ namespace AVQt {
 
         Q_DECLARE_PRIVATE(AVQt::Muxer)
 
-    protected:
-        void run() Q_DECL_OVERRIDE;
-
     public:
-        explicit Muxer(QIODevice *outputDevice, QObject *parent = nullptr);
+        enum class FORMAT {
+            MP4, MOV, MKV, WEBM, MPEGTS, INVALID
+        };
+
+        explicit Muxer(QIODevice *outputDevice, FORMAT format, QObject *parent = nullptr);
 
         Muxer(Muxer &) = delete;
 
@@ -46,6 +47,9 @@ namespace AVQt {
 
         void operator=(const Muxer &) = delete;
 
+    protected:
+        void run() Q_DECL_OVERRIDE;
+
     signals:
 
         void started() Q_DECL_OVERRIDE;
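
Since FORMAT::MP4 and FORMAT::MOV are refused on sequential devices, streaming use cases have to pick one of the streamable containers. Hypothetical usage with a socket as output device (socket setup omitted):

    // Matroska works on non-seekable outputs; FORMAT::MPEGTS would be the other typical choice
    auto *muxer = new AVQt::Muxer(socket, AVQt::Muxer::FORMAT::MKV);
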
diff --git a/AVQt/output/OpenGLRenderer.cpp b/AVQt/output/OpenGLRenderer.cpp
index d78699bc6edecea2909780f3839ffd8c735ef741..ef995e49d4b1c6f211f11daffb5ec5f9dfa0d913 100644
--- a/AVQt/output/OpenGLRenderer.cpp
+++ b/AVQt/output/OpenGLRenderer.cpp
@@ -6,21 +6,82 @@
 #include "OpenGLRenderer.h"
 
 #include <QtGui>
+#include <QImage>
 
+#include <va/va.h>
+#include <va/va_drmcommon.h>
+#include <libdrm/drm_fourcc.h>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GL/glu.h>
+#include <cstdio>
+#include <unistd.h>
+#include <iostream>
+
+#pragma clang diagnostic push
+#pragma ide diagnostic ignored "HidingNonVirtualFunction"
 extern "C" {
 #include <libavutil/frame.h>
 #include <libavcodec/avcodec.h>
 #include <libavutil/imgutils.h>
 #include <libavutil/hwcontext.h>
+#include <libavutil/hwcontext_vaapi.h>
 }
 
+#define LOOKUP_FUNCTION(type, func) \
+        type func = (type) eglGetProcAddress(#func); \
+        if (!(func)) { qFatal("eglGetProcAddress(" #func ")"); }
+
 static void loadResources() {
     Q_INIT_RESOURCE(resources);
 }
 
+static int closefd(int fd) {
+    return close(fd);
+}
+
+std::string eglErrorString(EGLint error) {
+    switch (error) {
+        case EGL_SUCCESS:
+            return "No error";
+        case EGL_NOT_INITIALIZED:
+            return "EGL not initialized or failed to initialize";
+        case EGL_BAD_ACCESS:
+            return "Resource inaccessible";
+        case EGL_BAD_ALLOC:
+            return "Cannot allocate resources";
+        case EGL_BAD_ATTRIBUTE:
+            return "Unrecognized attribute or attribute value";
+        case EGL_BAD_CONTEXT:
+            return "Invalid EGL context";
+        case EGL_BAD_CONFIG:
+            return "Invalid EGL frame buffer configuration";
+        case EGL_BAD_CURRENT_SURFACE:
+            return "Current surface is no longer valid";
+        case EGL_BAD_DISPLAY:
+            return "Invalid EGL display";
+        case EGL_BAD_SURFACE:
+            return "Invalid surface";
+        case EGL_BAD_MATCH:
+            return "Inconsistent arguments";
+        case EGL_BAD_PARAMETER:
+            return "Invalid argument";
+        case EGL_BAD_NATIVE_PIXMAP:
+            return "Invalid native pixmap";
+        case EGL_BAD_NATIVE_WINDOW:
+            return "Invalid native window";
+        case EGL_CONTEXT_LOST:
+            return "Context lost";
+        default:
+            return "Unknown error " + std::to_string(int(error));
+    }
+}
+
 namespace AVQt {
-    OpenGLRenderer::OpenGLRenderer(QWindow *parent) : QOpenGLWindow(QOpenGLWindow::NoPartialUpdate, parent),
+    OpenGLRenderer::OpenGLRenderer(QWidget *parent) : QOpenGLWidget(parent),
                                                       d_ptr(new OpenGLRendererPrivate(this)) {
+        setAttribute(Qt::WA_QuitOnClose);
     }
 
     [[maybe_unused]] [[maybe_unused]] OpenGLRenderer::OpenGLRenderer(OpenGLRendererPrivate &p) : d_ptr(&p) {
@@ -50,7 +111,15 @@ namespace AVQt {
     }
 
     int OpenGLRenderer::deinit(IFrameSource *source) {
-        Q_UNUSED(source)
+        Q_D(AVQt::OpenGLRenderer);
+        stop(source);
+
+        if (d->m_pQSVDerivedDeviceContext) {
+            av_buffer_unref(&d->m_pQSVDerivedFramesContext);
+            av_buffer_unref(&d->m_pQSVDerivedDeviceContext);
+        }
+
+        delete d->m_clock;
 
         return 0;
     }
@@ -65,7 +134,7 @@ namespace AVQt {
             qDebug("Started renderer");
 
             QMetaObject::invokeMethod(this, "showNormal", Qt::QueuedConnection);
-            QMetaObject::invokeMethod(this, "requestActivate", Qt::QueuedConnection);
+//            QMetaObject::invokeMethod(this, "requestActivate", Qt::QueuedConnection);
             QMetaObject::invokeMethod(this, "update", Qt::QueuedConnection);
 
             started();
@@ -81,7 +150,13 @@ namespace AVQt {
         if (d->m_running.compare_exchange_strong(shouldBeCurrent, false)) {
             hide();
 
-            d->m_clock->stop();
+            if (d->m_currentFrame) {
+                av_frame_free(&d->m_currentFrame);
+            }
+
+            if (d->m_clock) {
+                d->m_clock->stop();
+            }
 
             {
                 QMutexLocker lock(&d->m_renderQueueMutex);
@@ -101,18 +176,15 @@ namespace AVQt {
             d->m_vbo.destroy();
             d->m_vao.destroy();
 
-//                d->m_yTexture->destroy();
             delete d->m_yTexture;
-
-
-//                d->m_uTexture->destroy();
             delete d->m_uTexture;
-
-
-
-//                d->m_yTexture->destroy();
             delete d->m_vTexture;
 
+            if (d->m_EGLImages[0]) {
+                for (auto &EGLImage : d->m_EGLImages) {
+                    eglDestroyImage(d->m_EGLDisplay, EGLImage);
+                }
+            }
 
             stopped();
             return 0;
@@ -142,30 +214,73 @@ namespace AVQt {
         return d->m_paused.load();
     }
 
-    void OpenGLRenderer::onFrame(IFrameSource *source, AVFrame *frame, int64_t duration) {
+    void OpenGLRenderer::onFrame(IFrameSource *source, AVFrame *frame, int64_t duration, AVBufferRef *pDeviceCtx) {
+        Q_D(AVQt::OpenGLRenderer);
         Q_UNUSED(source)
         Q_UNUSED(duration)
 
-        Q_D(AVQt::OpenGLRenderer);
+        QMutexLocker onFrameLock{&d->m_onFrameMutex};
+
+        bool shouldBe = true;
 
         QPair<AVFrame *, int64_t> newFrame;
 
         newFrame.first = av_frame_alloc();
 //        av_frame_ref(newFrame.first, frame);
-        char strBuf[64];
+        constexpr auto strBufSize = 64;
+        char strBuf[strBufSize];
         qDebug("Pixel format: %s", av_get_pix_fmt_string(strBuf, 64, static_cast<AVPixelFormat>(frame->format)));
         switch (frame->format) {
-            case AV_PIX_FMT_VAAPI:
-            case AV_PIX_FMT_DRM_PRIME:
+//            case AV_PIX_FMT_VAAPI:
+//            case AV_PIX_FMT_DRM_PRIME:
             case AV_PIX_FMT_DXVA2_VLD:
                 qDebug("Transferring frame from GPU to CPU");
                 av_hwframe_transfer_data(newFrame.first, frame, 0);
                 break;
+            case AV_PIX_FMT_QSV:
+                qDebug("[AVQt::OpenGLRenderer] Mapping QSV frame to VAAPI for rendering");
+#ifdef Q_OS_LINUX
+                if (!d->m_pQSVDerivedDeviceContext) {
+                    int ret = av_hwdevice_ctx_create(&d->m_pQSVDerivedDeviceContext, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr,
+                                                     0);
+                    if (ret != 0) {
+                        qFatal("[AVQt::OpenGLRenderer] %i: Could not create derived VAAPI context: %s", ret,
+                               av_make_error_string(strBuf, strBufSize, ret));
+                    }
+                    d->m_pQSVDerivedFramesContext = av_hwframe_ctx_alloc(d->m_pQSVDerivedDeviceContext);
+                    auto framesCtx = reinterpret_cast<AVHWFramesContext *>(d->m_pQSVDerivedFramesContext->data);
+                    auto framesCtxOld = reinterpret_cast<AVHWFramesContext *>(frame->hw_frames_ctx->data);
+                    framesCtx->sw_format = framesCtxOld->sw_format;
+                    framesCtx->format = AV_PIX_FMT_VAAPI;
+                    framesCtx->width = framesCtxOld->width;
+                    framesCtx->height = framesCtxOld->height;
+                    framesCtx->initial_pool_size = framesCtxOld->initial_pool_size;
+
+                    ret = av_hwframe_ctx_init(d->m_pQSVDerivedFramesContext);
+
+                    if (ret != 0) {
+                        qFatal("[AVQt::OpenGLRenderer] %i: Could not create derived frames context: %s", ret,
+                               av_make_error_string(strBuf, strBufSize, ret));
+                    }
+                }
+                newFrame.first->hw_frames_ctx = av_buffer_ref(d->m_pQSVDerivedFramesContext);
+                av_hwframe_map(newFrame.first, frame, AV_HWFRAME_MAP_READ);
+#else
+                qFatal("[AVQt::OpenGLRenderer] Mapping QSV frames to anything other than VAAPI is currently not supported");
+#endif
+                break;
             default:
                 qDebug("Referencing frame");
                 av_frame_ref(newFrame.first, frame);
                 break;
         }
+
+        if (d->m_firstFrame.compare_exchange_strong(shouldBe, false) && newFrame.first->format == AV_PIX_FMT_VAAPI) {
+            d->m_pVAContext = static_cast<AVVAAPIDeviceContext *>(reinterpret_cast<AVHWDeviceContext *>(pDeviceCtx->data)->hwctx);
+            d->m_VADisplay = d->m_pVAContext->display;
+            av_buffer_unref(&pDeviceCtx);
+        }
+
         newFrame.first->pts = frame->pts;
         newFrame.second = duration;
 //        av_frame_unref(frame);
@@ -174,7 +289,7 @@ namespace AVQt {
         //qDebug() << "Pixel format:" << av_get_pix_fmt_string(strBuf, 64, static_cast<AVPixelFormat>(frame->format));
 
 
-        while (d->m_renderQueue.size() >= 100) {
+        while (d->m_renderQueue.size() >= 4) {
             QThread::msleep(4);
         }
 
@@ -187,6 +302,54 @@ namespace AVQt {
 
         loadResources();
 
+        EGLint visual_attr[] = {
+                EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
+                EGL_RED_SIZE, 8,
+                EGL_GREEN_SIZE, 8,
+                EGL_BLUE_SIZE, 8,
+                EGL_ALPHA_SIZE, 8,
+                EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
+                EGL_NONE
+        };
+        d->m_EGLDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+//        if (d->m_EGLDisplay == EGL_NO_DISPLAY) {
+//            qDebug("Could not get default EGL display, connecting to X-Server");
+//            Display *display = XOpenDisplay(nullptr);
+//            if (!display) {
+//                qFatal("Could not get X11 display");
+//            }
+//            d->m_EGLDisplay = eglGetDisplay(static_cast<EGLNativeDisplayType>(display));
+        if (d->m_EGLDisplay == EGL_NO_DISPLAY) {
+            qFatal("Could not get EGL display: %s", eglErrorString(eglGetError()).c_str());
+        }
+//        }
+        if (!eglInitialize(d->m_EGLDisplay, nullptr, nullptr)) {
+            qFatal("eglInitialize");
+        }
+        if (!eglBindAPI(EGL_OPENGL_API)) {
+            qFatal("eglBindAPI");
+        }
+
+        EGLConfig cfg;
+        EGLint cfg_count;
+        if (!eglChooseConfig(d->m_EGLDisplay, visual_attr, &cfg, 1, &cfg_count) || (cfg_count < 1)) {
+            qFatal("eglChooseConfig: %s", eglErrorString(eglGetError()).c_str());
+        }
+//        EGLint ctx_attr[] = {
+//                EGL_CONTEXT_OPENGL_PROFILE_MASK,
+//                EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT,
+//                EGL_CONTEXT_MAJOR_VERSION, 3,
+//                EGL_CONTEXT_MINOR_VERSION, 3,
+//                EGL_CONTEXT_OPENGL_DEBUG, EGL_TRUE,
+//                EGL_NONE
+//        };
+//        d->m_EGLContext = eglCreateContext(d->m_EGLDisplay, cfg, EGL_NO_CONTEXT, ctx_attr);
+//        if (d->m_EGLContext == EGL_NO_CONTEXT) {
+//            qFatal("eglCreateContext");
+//        }
+
+        qDebug("EGL Version: %s", eglQueryString(d->m_EGLDisplay, EGL_VERSION));
+
         QByteArray shaderVersionString;
 
         if (context()->isOpenGLES()) {
@@ -220,94 +383,30 @@ namespace AVQt {
         d->m_program->setUniformValue("textureU", 1);
         d->m_program->setUniformValue("textureV", 2);
         d->m_program->release();
+    }
 
-        float vertices[] = {
-                1, 1, 0,   // top right
-                1, -1, 0,   // bottom right
-                -1, -1, 0,  // bottom left
-                -1, 1, 0   // top left
-        };
-
-        float vertTexCoords[] = {
-                0, 0,
-                1, 1,
-                0, 1,
-                1, 0
-        };
-
-//    QColor vertexColors[] = {
-//        QColor(0xf6, 0xa5, 0x09, 128),
-//        QColor(0xcb, 0x2d, 0xde, 128),av_q2d(av_inv_q(framerate))
-//        QColor(0x0e, 0xee, 0xd1, 128),
-//        QColor(0x06, 0x89, 0x18, 128)
-//    };
+#pragma clang diagnostic push
+#pragma ide diagnostic ignored "modernize-use-auto"
 
-        std::vector<float> vertexBufferData(5 * 4);  // 8 entries per vertex * 4 vertices
+    void OpenGLRenderer::paintGL() {
+        Q_D(AVQt::OpenGLRenderer);
+//        auto t1 = std::chrono::high_resolution_clock::now();
 
-        float *buf = vertexBufferData.data();
+        LOOKUP_FUNCTION(PFNEGLCREATEIMAGEKHRPROC, eglCreateImageKHR)
+        LOOKUP_FUNCTION(PFNEGLDESTROYIMAGEKHRPROC, eglDestroyImageKHR)
+        LOOKUP_FUNCTION(PFNGLEGLIMAGETARGETTEXTURE2DOESPROC, glEGLImageTargetTexture2DOES)
 
-        for (int v = 0; v < 4; ++v, buf += 5) {
-            buf[0] = vertices[3 * v];
-            buf[1] = vertices[3 * v + 1];
-            buf[2] = vertices[3 * v + 2];
 
-            buf[3] = vertTexCoords[v];
-            buf[4] = vertTexCoords[v + 1];
+        if (d->m_currentFrame) {
+            int display_width = width();
+            int display_height = (width() * d->m_currentFrame->height + d->m_currentFrame->width / 2) / d->m_currentFrame->width;
+            if (display_height > height()) {
+                display_width = (height() * d->m_currentFrame->width + d->m_currentFrame->height / 2) / d->m_currentFrame->height;
+                display_height = height();
+            }
+            glViewport((width() - display_width) / 2, (height() - display_height) / 2, display_width, display_height);
         }
 
-        d->m_vbo = QOpenGLBuffer(QOpenGLBuffer::VertexBuffer);
-        d->m_vbo.create();
-        //    QObject::connect(&renderer, &AVQt::OpenGLRenderer::started, [&]() {
-        d->m_vbo.setUsagePattern(QOpenGLBuffer::StaticDraw);
-        d->m_vbo.bind();
-
-        d->m_vbo.allocate(vertexBufferData.data(), static_cast<int>(vertexBufferData.size() * sizeof(float)));
-
-        d->m_vao.create();
-        d->m_vao.bind();
-
-        uint indices[] = {
-                0, 1, 3, // first tri
-                1, 2, 3  // second tri
-        };
-
-        d->m_ibo = QOpenGLBuffer(QOpenGLBuffer::IndexBuffer);
-        d->m_ibo.create();
-        d->m_ibo.setUsagePattern(QOpenGLBuffer::StaticDraw);
-        d->m_ibo.bind();
-        d->m_ibo.allocate(indices, sizeof(indices));
-
-
-        int stride = 5 * sizeof(float);
-
-        // layout location 0 - vec3 with coords
-        d->m_program->enableAttributeArray(0);
-        d->m_program->setAttributeBuffer(0, GL_FLOAT, 0, 3, stride);
-
-        // layout location 1 - vec3 with colors
-//    d->m_program->enableAttributeArray(1);
-//    int colorOffset = 3 * sizeof(float);
-//    d->m_program->setAttributeBuffer(1, GL_FLOAT, colorOffset, 3, stride);
-
-        // layout location 1 - vec2 with texture coordinates
-        d->m_program->enableAttributeArray(1);
-        int texCoordsOffset = 3 * sizeof(float);
-        d->m_program->setAttributeBuffer(1, GL_FLOAT, texCoordsOffset, 2, stride);
-
-//                // layout location 2 - int with texture id
-//                d->m_program->enableAttributeArray(2);
-//                d->m_program->setAttributeValue(2, d->m_yTexture->textureId());
-
-        // Release (unbind) all
-
-        d->m_vbo.release();
-        d->m_vao.release();
-    }
-
-    void OpenGLRenderer::paintGL() {
-        Q_D(AVQt::OpenGLRenderer);
-//        auto t1 = std::chrono::high_resolution_clock::now();
-
 //         Clear background
         glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
         glClear(GL_COLOR_BUFFER_BIT);
@@ -332,7 +431,6 @@ namespace AVQt {
                 }
                 if (d->m_updateRequired.load() && !d->m_renderQueue.isEmpty()) {
                     d->m_updateRequired.store(false);
-//                    qDebug("Adding duration %ld ms to position", d->m_currentFrameTimeout);
                     QPair<AVFrame *, int64_t> frame = d->m_renderQueue.dequeue();
                     while (!d->m_renderQueue.isEmpty()) {
                         if (frame.first->pts <= d->m_updateTimestamp.load()) {
@@ -362,150 +460,343 @@ namespace AVQt {
                         }
 
                         if (d->m_currentFrame) {
-                            if (d->m_currentFrame->format == AV_PIX_FMT_BGRA) {
-                                av_freep(&d->m_currentFrame->data[0]);
-                                av_frame_free(&d->m_currentFrame);
-                                d->m_currentFrame = nullptr;
-                            } else {
-                                av_frame_unref(d->m_currentFrame);
-                                av_frame_free(&d->m_currentFrame);
-                                d->m_currentFrame = nullptr;
-                            }
+                            av_frame_free(&d->m_currentFrame);
                         }
 
                         d->m_currentFrame = frame.first;
-                        d->m_currentFrameTimeout = frame.second;
                     }
 
                     if (firstFrame) {
-                        bool VTexActive = false, UTexActive = false;
-                        QSize YSize, USize, VSize;
-                        QOpenGLTexture::TextureFormat textureFormatY, textureFormatU, textureFormatV;
-                        QOpenGLTexture::PixelFormat pixelFormatY, pixelFormatU, pixelFormatV;
-                        QOpenGLTexture::PixelType pixelType;
-                        switch (static_cast<AVPixelFormat>(d->m_currentFrame->format)) {
-                            case AV_PIX_FMT_BGRA:
-                                YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
-                                textureFormatY = QOpenGLTexture::RGBA8_UNorm;
-                                pixelFormatY = QOpenGLTexture::BGRA;
-                                pixelType = QOpenGLTexture::UInt8;
+                        // The frame height is padded to a 64-pixel alignment, so clamp the maximum texture height coordinate to cut off the padding rows
+                        float maxTexHeight = 1.0f;
+                        if (d->m_currentFrame->format == AV_PIX_FMT_VAAPI) {
+                            VASurfaceID vaSurfaceId = reinterpret_cast<uintptr_t>(d->m_currentFrame->data[3]);
+                            VAImage vaImage;
+                            vaDeriveImage(d->m_VADisplay, vaSurfaceId, &vaImage);
+                            maxTexHeight = static_cast<float>(d->m_currentFrame->height * 1.0 / (vaImage.height + 2.0));
+                            vaDestroyImage(d->m_VADisplay, vaImage.image_id);
+                        }
+
+                        float vertices[] = {
+                                1, 1, 0,   // top right
+                                1, -1, 0,   // bottom right
+                                -1, -1, 0,  // bottom left
+                                -1, 1, 0   // top left
+                        };
+
+                        float vertTexCoords[] = {
+                                0, 0,
+                                maxTexHeight, maxTexHeight,
+                                0, 1,
+                                1, 0
+                        };
+
+                        std::vector<float> vertexBufferData(5 * 4);  // 5 entries per vertex * 4 vertices
+
+                        float *buf = vertexBufferData.data();
+
+                        for (int v = 0; v < 4; ++v, buf += 5) {
+                            buf[0] = vertices[3 * v];
+                            buf[1] = vertices[3 * v + 1];
+                            buf[2] = vertices[3 * v + 2];
+
+                            buf[3] = vertTexCoords[v];
+                            buf[4] = vertTexCoords[v + 1];
+                        }
+
+                        d->m_vbo = QOpenGLBuffer(QOpenGLBuffer::VertexBuffer);
+                        d->m_vbo.create();
+                        d->m_vbo.setUsagePattern(QOpenGLBuffer::StaticDraw);
+                        d->m_vbo.bind();
+
+                        d->m_vbo.allocate(vertexBufferData.data(), static_cast<int>(vertexBufferData.size() * sizeof(float)));
+
+                        d->m_vao.create();
+                        d->m_vao.bind();
+
+                        uint indices[] = {
+                                0, 1, 3, // first tri
+                                1, 2, 3  // second tri
+                        };
+
+                        d->m_ibo = QOpenGLBuffer(QOpenGLBuffer::IndexBuffer);
+                        d->m_ibo.create();
+                        d->m_ibo.setUsagePattern(QOpenGLBuffer::StaticDraw);
+                        d->m_ibo.bind();
+                        d->m_ibo.allocate(indices, sizeof(indices));
+
+
+                        int stride = 5 * sizeof(float);
+
+                        // layout location 0 - vec3 with coords
+                        d->m_program->enableAttributeArray(0);
+                        d->m_program->setAttributeBuffer(0, GL_FLOAT, 0, 3, stride);
+
+                        // layout location 1 - vec2 with texture coordinates
+                        d->m_program->enableAttributeArray(1);
+                        int texCoordsOffset = 3 * sizeof(float);
+                        d->m_program->setAttributeBuffer(1, GL_FLOAT, texCoordsOffset, 2, stride);
+
+                        // Release (unbind) all
+                        d->m_vbo.release();
+                        d->m_vao.release();
+                        if (d->m_currentFrame->format == AV_PIX_FMT_VAAPI) {
+                            glGenTextures(2, d->m_textures);
+                            for (const auto &texture : d->m_textures) {
+                                glBindTexture(GL_TEXTURE_2D, texture);
+                                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+                                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+                                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+                                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+                            }
+                            d->m_program->bind();
+                            d->m_program->setUniformValue("inputFormat", 1);
+                            d->m_program->release();
+                        } else {
+                            bool VTexActive = false, UTexActive = false;
+                            QSize YSize, USize, VSize;
+                            QOpenGLTexture::TextureFormat textureFormatY, textureFormatU, textureFormatV;
+                            QOpenGLTexture::PixelFormat pixelFormatY, pixelFormatU, pixelFormatV;
+                            QOpenGLTexture::PixelType pixelType;
+                            switch (static_cast<AVPixelFormat>(d->m_currentFrame->format)) {
+                                case AV_PIX_FMT_BGRA:
+                                    YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
+                                    textureFormatY = QOpenGLTexture::RGBA8_UNorm;
+                                    pixelFormatY = QOpenGLTexture::BGRA;
+                                    pixelType = QOpenGLTexture::UInt8;
+                                    break;
+                                case AV_PIX_FMT_YUV420P:
+                                    YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
+                                    USize = VSize = QSize(d->m_currentFrame->width / 2, d->m_currentFrame->height / 2);
+                                    textureFormatY = textureFormatU = textureFormatV = QOpenGLTexture::R8_UNorm;
+                                    pixelFormatY = pixelFormatU = pixelFormatV = QOpenGLTexture::Red;
+                                    pixelType = QOpenGLTexture::UInt8;
+                                    UTexActive = VTexActive = true;
+                                    break;
+                                case AV_PIX_FMT_YUV420P10:
+                                    YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
+                                    USize = VSize = QSize(d->m_currentFrame->width / 2, d->m_currentFrame->height / 2);
+                                    textureFormatY = textureFormatU = textureFormatV = QOpenGLTexture::R16_UNorm;
+                                    pixelFormatY = pixelFormatU = pixelFormatV = QOpenGLTexture::Red;
+                                    pixelType = QOpenGLTexture::UInt16;
+                                    UTexActive = VTexActive = true;
+                                    break;
+                                case AV_PIX_FMT_NV12:
+                                    YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
+                                    USize = QSize(d->m_currentFrame->width / 2, d->m_currentFrame->height / 2);
+                                    textureFormatY = QOpenGLTexture::R8_UNorm;
+                                    textureFormatU = QOpenGLTexture::RG8_UNorm;
+                                    pixelFormatY = QOpenGLTexture::Red;
+                                    pixelFormatU = QOpenGLTexture::RG;
+                                    pixelType = QOpenGLTexture::UInt8;
+                                    UTexActive = true;
+                                    break;
+                                case AV_PIX_FMT_P010:
+                                    YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
+                                    USize = QSize(d->m_currentFrame->width / 2, d->m_currentFrame->height / 2);
+                                    textureFormatY = QOpenGLTexture::R16_UNorm;
+                                    textureFormatU = QOpenGLTexture::RG16_UNorm;
+                                    pixelFormatY = QOpenGLTexture::Red;
+                                    pixelFormatU = QOpenGLTexture::RG;
+                                    pixelType = QOpenGLTexture::UInt16;
+                                    UTexActive = true;
+                                    break;
+                                default:
+                                    qFatal("[AVQt::OpenGLRenderer] Unsupported pixel format");
+                            }
+                            d->m_yTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
+                            d->m_yTexture->setSize(YSize.width(), YSize.height());
+                            d->m_yTexture->setFormat(textureFormatY);
+                            d->m_yTexture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
+                            d->m_yTexture->allocateStorage(pixelFormatY, pixelType);
+                            if (UTexActive) {
+                                d->m_uTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
+                                d->m_uTexture->setSize(USize.width(), USize.height());
+                                d->m_uTexture->setFormat(textureFormatU);
+                                d->m_uTexture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
+                                d->m_uTexture->allocateStorage(pixelFormatU, pixelType);
+                            }
+                            if (VTexActive) {
+                                d->m_vTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
+                                d->m_vTexture->setSize(VSize.width(), VSize.height());
+                                d->m_vTexture->setFormat(textureFormatV);
+                                d->m_vTexture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
+                                d->m_vTexture->allocateStorage(pixelFormatV, pixelType);
+                            }
+                        }
+                    }
+
+                    if (d->m_currentFrame->format == AV_PIX_FMT_VAAPI) {
+                        for (int i = 0; i < 2; ++i) {
+                            glActiveTexture(GL_TEXTURE0 + i);
+                            glBindTexture(GL_TEXTURE_2D, d->m_textures[i]);
+                            eglDestroyImageKHR(d->m_EGLDisplay, d->m_EGLImages[i]);
+                        }
+
+                        VASurfaceID va_surface = reinterpret_cast<uintptr_t>(d->m_currentFrame->data[3]);
+
+//                        VAImage vaImage;
+//                        vaDeriveImage(d->m_VADisplay, va_surface, &vaImage);
+//                        VABufferInfo vaBufferInfo;
+//                        memset(&vaBufferInfo, 0, sizeof(VABufferInfo));
+//                        vaBufferInfo.mem_type = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
+//                        vaAcquireBufferHandle(d->m_VADisplay, vaImage.buf, &vaBufferInfo);
+
+                        VADRMPRIMESurfaceDescriptor prime;
+                        if (vaExportSurfaceHandle(d->m_VADisplay, va_surface, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
+                                                  VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS, &prime) !=
+                            VA_STATUS_SUCCESS) {
+                            qFatal("[AVQt::OpenGLRenderer] Could not export VA surface handle");
+                        }
+                        vaSyncSurface(d->m_VADisplay, va_surface);
+
+                        static uint32_t formats[2];
+                        char strBuf[AV_FOURCC_MAX_STRING_SIZE];
+                        switch (prime.fourcc) {
+//                        switch (vaImage.format.fourcc) {
+                            case VA_FOURCC_P010:
+                                formats[0] = DRM_FORMAT_R16;
+                                formats[1] = DRM_FORMAT_GR1616;
                                 break;
-                            case AV_PIX_FMT_YUV420P:
-                                YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
-                                USize = VSize = QSize(d->m_currentFrame->width / 2, d->m_currentFrame->height / 2);
-                                textureFormatY = textureFormatU = textureFormatV = QOpenGLTexture::R8_UNorm;
-                                pixelFormatY = pixelFormatU = pixelFormatV = QOpenGLTexture::Red;
-                                pixelType = QOpenGLTexture::UInt8;
-                                UTexActive = VTexActive = true;
+                            case VA_FOURCC_NV12:
+                                formats[0] = DRM_FORMAT_R8;
+                                formats[1] = DRM_FORMAT_GR88;
                                 break;
-                            case AV_PIX_FMT_YUV420P10:
-                                YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
-                                USize = VSize = QSize(d->m_currentFrame->width / 2, d->m_currentFrame->height / 2);
-                                textureFormatY = textureFormatU = textureFormatV = QOpenGLTexture::R16_UNorm;
-                                pixelFormatY = pixelFormatU = pixelFormatV = QOpenGLTexture::Red;
-                                pixelType = QOpenGLTexture::UInt16;
-                                UTexActive = VTexActive = true;
+                            default:
+                                qFatal("Unsupported pixel format: %s", av_fourcc_make_string(strBuf, prime.fourcc));
+//                                qFatal("Unsupported pixel format: %s", av_fourcc_make_string(strBuf, vaImage.format.fourcc));
+                        }
+
+                        for (int i = 0; i < 2; ++i) {
+                            if (prime.layers[i].drm_format != formats[i]) {
+                                qFatal("[AVQt::OpenGLRenderer] Invalid pixel format: %s",
+                                       av_fourcc_make_string(strBuf, prime.layers[i].drm_format));
+                            }
+
+#define LAYER i
+#define PLANE 0
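+                            // Import this layer as a dma-buf; the chroma layer (i == 1) is half the luma width and height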
+                            const EGLint img_attr[]{
+                                    EGL_LINUX_DRM_FOURCC_EXT, static_cast<EGLint>(prime.layers[LAYER].drm_format),
+                                    EGL_WIDTH, static_cast<EGLint>(prime.width / (i + 1)),
+                                    EGL_HEIGHT, static_cast<EGLint>(prime.height / (i + 1)),
+                                    EGL_DMA_BUF_PLANE0_FD_EXT, prime.objects[prime.layers[LAYER].object_index[PLANE]].fd,
+                                    EGL_DMA_BUF_PLANE0_OFFSET_EXT, static_cast<EGLint>(prime.layers[LAYER].offset[PLANE]),
+                                    EGL_DMA_BUF_PLANE0_PITCH_EXT, static_cast<EGLint>(prime.layers[LAYER].pitch[PLANE]),
+                                    EGL_NONE
+                            };
+
+//                            const EGLint *img_attr = new EGLint[]{
+//                                    EGL_WIDTH, vaImage.width,
+//                                    EGL_HEIGHT, vaImage.height,
+//                                    EGL_LINUX_DRM_FOURCC_EXT, static_cast<EGLint>(formats[i]),
+//                                    EGL_DMA_BUF_PLANE0_FD_EXT, static_cast<EGLint>(vaBufferInfo.handle),
+//                                    EGL_DMA_BUF_PLANE0_OFFSET_EXT, static_cast<EGLint>(vaImage.offsets[i]),
+//                                    EGL_DMA_BUF_PLANE0_PITCH_EXT, static_cast<EGLint>(vaImage.pitches[i]),
+//                                    EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
+//                                    EGL_NONE
+//                            };
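+                            // Drain any stale EGL errors so the check after eglCreateImageKHR reflects only that call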
+                            while (eglGetError() != EGL_SUCCESS);
+                            d->m_EGLImages[i] = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, nullptr,
+                                                                  img_attr);
+
+                            auto error = eglGetError();
+                            if (!d->m_EGLImages[i] || d->m_EGLImages[i] == EGL_NO_IMAGE_KHR || error != EGL_SUCCESS) {
+                                qFatal("[AVQt::OpenGLRenderer] Could not create %s EGLImage: %s", (i ? "UV" : "Y"),
+                                       eglErrorString(error).c_str());
+                            }
+#undef LAYER
+#undef PLANE
+
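+                            // Bind the target texture and attach the EGLImage to it via GL_OES_EGL_image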
+                            glActiveTexture(GL_TEXTURE0 + i);
+                            glBindTexture(GL_TEXTURE_2D, d->m_textures[i]);
+                            while (glGetError() != GL_NO_ERROR);
+                            glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, d->m_EGLImages[i]);
+                            auto err = glGetError();
+
+                            if (err != GL_NO_ERROR) {
+                                qFatal("Could not map EGL image to OGL texture: %#0.4x, %s", err, gluErrorString(err));
+                            }
+
+//                            void *data = new uint16_t[prime.width * prime.height];
+//
+//                            glGetTexImage(GL_TEXTURE_2D, 0, GL_RED, GL_UNSIGNED_SHORT, data);
+//                            QImage image(reinterpret_cast<uint8_t *>(data), prime.width, prime.height, QImage::Format_Grayscale16);
+//                            image.save("output.bmp");
+
+//                            exit(0);
+                        }
+//                        vaReleaseBufferHandle(d->m_VADisplay, vaImage.buf);
+//                        vaDestroyImage(d->m_VADisplay, vaImage.image_id);
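+                        // EGL keeps its own references to the dma-bufs, so the exported fds can be closed to avoid leaking them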
+                        for (int i = 0; i < (int) prime.num_objects; ++i) {
+                            closefd(prime.objects[i].fd);
+                        }
+                    } else {
+//                    qDebug("Frame duration: %ld ms", d->m_currentFrameTimeout);
+                        if (differentPixFmt) {
+                            d->m_program->bind();
+                        }
+                        d->m_yTexture->bind(0);
+                        if (d->m_uTexture) {
+                            d->m_uTexture->bind(1);
+                        }
+                        if (d->m_vTexture) {
+                            d->m_vTexture->bind(2);
+                        }
+                        switch (d->m_currentFrame->format) {
+                            case AV_PIX_FMT_BGRA:
+                                d->m_yTexture->setData(QOpenGLTexture::PixelFormat::BGRA, QOpenGLTexture::UInt8,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[0]));
+                                if (differentPixFmt) {
+                                    d->m_program->setUniformValue("inputFormat", 0);
+                                }
                                 break;
                             case AV_PIX_FMT_NV12:
-                                YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
-                                USize = QSize(d->m_currentFrame->width / 2, d->m_currentFrame->height / 2);
-                                textureFormatY = QOpenGLTexture::R8_UNorm;
-                                textureFormatU = QOpenGLTexture::RG8_UNorm;
-                                pixelFormatY = QOpenGLTexture::Red;
-                                pixelFormatU = QOpenGLTexture::RG;
-                                pixelType = QOpenGLTexture::UInt8;
-                                UTexActive = true;
+                                d->m_yTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[0]));
+                                d->m_uTexture->setData(QOpenGLTexture::RG, QOpenGLTexture::UInt8,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[1]));
+                                if (differentPixFmt) {
+                                    d->m_program->setUniformValue("inputFormat", 1);
+                                }
                                 break;
                             case AV_PIX_FMT_P010:
-                                YSize = QSize(d->m_currentFrame->width, d->m_currentFrame->height);
-                                USize = QSize(d->m_currentFrame->width / 2, d->m_currentFrame->height / 2);
-                                textureFormatY = QOpenGLTexture::R16_UNorm;
-                                textureFormatU = QOpenGLTexture::RG16_UNorm;
-                                pixelFormatY = QOpenGLTexture::Red;
-                                pixelFormatU = QOpenGLTexture::RG;
-                                pixelType = QOpenGLTexture::UInt16;
-                                UTexActive = true;
+                                d->m_yTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt16,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[0]));
+                                d->m_uTexture->setData(QOpenGLTexture::RG, QOpenGLTexture::UInt16,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[1]));
+                                if (differentPixFmt) {
+                                    d->m_program->setUniformValue("inputFormat", 1);
+                                }
                                 break;
-                            default:
-                                qFatal("[AVQt::OpenGLRenderer] Unsupported pixel format");
+                            case AV_PIX_FMT_YUV420P:
+                                d->m_yTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[0]));
+                                d->m_uTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[1]));
+                                d->m_vTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[2]));
+                                if (differentPixFmt) {
+                                    d->m_program->setUniformValue("inputFormat", 2);
+                                }
                                 break;
+                            case AV_PIX_FMT_YUV420P10:
+                                d->m_yTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt16,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[0]));
+                                d->m_uTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt16,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[1]));
+                                d->m_vTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt16,
+                                                       const_cast<const uint8_t *>(d->m_currentFrame->data[2]));
+                                if (differentPixFmt) {
+                                    d->m_program->setUniformValue("inputFormat", 3);
+                                }
+                                break;
+                            default:
+                                qFatal("Pixel format not supported");
                         }
-                        d->m_yTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
-                        d->m_yTexture->setSize(YSize.width(), YSize.height());
-                        d->m_yTexture->setFormat(textureFormatY);
-                        d->m_yTexture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
-                        d->m_yTexture->allocateStorage(pixelFormatY, pixelType);
-                        if (UTexActive) {
-                            d->m_uTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
-                            d->m_uTexture->setSize(USize.width(), USize.height());
-                            d->m_uTexture->setFormat(textureFormatU);
-                            d->m_uTexture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
-                            d->m_uTexture->allocateStorage(pixelFormatU, pixelType);
-                        }
-                        if (VTexActive) {
-                            d->m_vTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
-                            d->m_vTexture->setSize(VSize.width(), VSize.height());
-                            d->m_vTexture->setFormat(textureFormatV);
-                            d->m_vTexture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
-                            d->m_vTexture->allocateStorage(pixelFormatV, pixelType);
+                        if (differentPixFmt) {
+                            d->m_program->release();
                         }
                     }
-//                    qDebug("Frame duration: %ld ms", d->m_currentFrameTimeout);
-                    if (differentPixFmt) {
-                        d->m_program->bind();
-                    }
-                    d->m_yTexture->bind(0);
-                    if (d->m_uTexture) {
-                        d->m_uTexture->bind(1);
-                    }
-                    if (d->m_vTexture) {
-                        d->m_vTexture->bind(2);
-                    }
-                    switch (d->m_currentFrame->format) {
-                        case AV_PIX_FMT_BGRA:
-                            d->m_yTexture->setData(QOpenGLTexture::PixelFormat::BGRA, QOpenGLTexture::UInt8,
-                                                   d->m_currentFrame->data[0]);
-                            if (differentPixFmt) {
-                                d->m_program->setUniformValue("inputFormat", 0);
-                            }
-                            break;
-                        case AV_PIX_FMT_NV12:
-                            d->m_yTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8, d->m_currentFrame->data[0]);
-                            d->m_uTexture->setData(QOpenGLTexture::RG, QOpenGLTexture::UInt8, d->m_currentFrame->data[1]);
-                            if (differentPixFmt) {
-                                d->m_program->setUniformValue("inputFormat", 1);
-                            }
-                            break;
-                        case AV_PIX_FMT_P010:
-                            d->m_yTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt16, d->m_currentFrame->data[0]);
-                            d->m_uTexture->setData(QOpenGLTexture::RG, QOpenGLTexture::UInt16, d->m_currentFrame->data[1]);
-                            if (differentPixFmt) {
-                                d->m_program->setUniformValue("inputFormat", 1);
-                            }
-                            break;
-                        case AV_PIX_FMT_YUV420P:
-                            d->m_yTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8, d->m_currentFrame->data[0]);
-                            d->m_uTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8, d->m_currentFrame->data[1]);
-                            d->m_vTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8, d->m_currentFrame->data[2]);
-                            if (differentPixFmt) {
-                                d->m_program->setUniformValue("inputFormat", 2);
-                            }
-                            break;
-                        case AV_PIX_FMT_YUV420P10:
-                            // TODO: Fix crappy and low-res colors with 10bit YUV420P10LE, caused by data in lsb
-                            d->m_yTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt16, d->m_currentFrame->data[0]);
-                            d->m_uTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt16, d->m_currentFrame->data[1]);
-                            d->m_vTexture->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt16, d->m_currentFrame->data[2]);
-                            if (differentPixFmt) {
-                                d->m_program->setUniformValue("inputFormat", 3);
-                            }
-                            break;
-                        default:
-                            qFatal("Pixel format not supported");
-                    }
-                    if (differentPixFmt) {
-                        d->m_program->release();
-                    }
                 }
             }
         } else if (d->m_clock) {
@@ -525,13 +816,20 @@ namespace AVQt {
         if (d->m_currentFrame) {
             qDebug("Drawing frame with PTS: %lld", static_cast<long long>(d->m_currentFrame->pts));
             d->m_program->bind();
-            if (!d->m_yTexture->isBound(0)) {
-                d->m_yTexture->bind(0);
-                if (d->m_uTexture) {
-                    d->m_uTexture->bind(1);
-                }
-                if (d->m_vTexture) {
-                    d->m_vTexture->bind(2);
+            if (d->m_currentFrame->format == AV_PIX_FMT_VAAPI) {
+                glActiveTexture(GL_TEXTURE0);
+                glBindTexture(GL_TEXTURE_2D, d->m_textures[0]);
+                glActiveTexture(GL_TEXTURE1);
+                glBindTexture(GL_TEXTURE_2D, d->m_textures[1]);
+            } else {
+                if (!d->m_yTexture->isBound(0)) {
+                    d->m_yTexture->bind(0);
+                    if (d->m_uTexture) {
+                        d->m_uTexture->bind(1);
+                    }
+                    if (d->m_vTexture) {
+                        d->m_vTexture->bind(2);
+                    }
                 }
             }
 
@@ -547,12 +845,16 @@ namespace AVQt {
             d->m_vao.release();
             d->m_vbo.release();
             d->m_program->release();
-            d->m_yTexture->release(0);
-            if (d->m_uTexture) {
-                d->m_uTexture->release(1);
-            }
-            if (d->m_vTexture) {
-                d->m_vTexture->release(2);
+            if (d->m_currentFrame->format == AV_PIX_FMT_VAAPI) {
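+                // The raw GL textures used for VAAPI frames stay bound; there is nothing to release here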
+
+            } else {
+                d->m_yTexture->release(0);
+                if (d->m_uTexture) {
+                    d->m_uTexture->release(1);
+                }
+                if (d->m_vTexture) {
+                    d->m_vTexture->release(2);
+                }
             }
         }
 
@@ -580,16 +882,16 @@ namespace AVQt {
 
 //            qDebug() << "Current timestamp:" << d->m_position.toString("hh:mm:ss.zzz");
 
-        QTime position{0, 0, 0, 0};
         if (d->m_currentFrame) {
-            position = OpenGLRendererPrivate::timeFromMillis(d->m_currentFrame->pts / 1000);
+            d->m_position = OpenGLRendererPrivate::timeFromMillis(d->m_currentFrame->pts / 1000);
         }
 
-        QString overlay(position.toString("hh:mm:ss.zzz") + "/" + d->m_duration.toString("hh:mm:ss.zzz"));
+        QString overlay(d->m_position.toString("hh:mm:ss.zzz") + "/" + d->m_duration.toString("hh:mm:ss.zzz"));
         QFontMetrics fm(roboto);
-        p.fillRect(fm.boundingRect(overlay).translated(static_cast<int>(20), static_cast<int>(20)).adjusted(-5, -5, 5, 5),
-                   QColor(0xFF, 0xFF, 0xFF, 0x48));
-        p.drawText(fm.boundingRect(overlay).translated(static_cast<int>(20), static_cast<int>(20)), overlay);
+        QRect overlayRect = fm.boundingRect(overlay);
+        overlayRect.moveTopLeft({20, 20});
+        p.fillRect(overlayRect.adjusted(-5, 0, 5, 0), QColor(0xFF, 0xFF, 0xFF, 0x48));
+        p.drawText(overlayRect, overlay);
         p.end();
         qDebug() << "Paused:" << (d->m_paused.load() ? "true" : "false");
         if (!d->m_paused.load()) {
@@ -597,19 +899,27 @@ namespace AVQt {
         }
     }
 
+#pragma clang diagnostic pop
+
     void OpenGLRenderer::mouseReleaseEvent(QMouseEvent *event) {
         if (event->button() == Qt::LeftButton) {
             pause(nullptr, !isPaused());
         }
     }
 
+    void OpenGLRenderer::closeEvent(QCloseEvent *event) {
+        QApplication::quit();
+        QWidget::closeEvent(event);
+    }
+
     RenderClock *OpenGLRenderer::getClock() {
         Q_D(AVQt::OpenGLRenderer);
         return d->m_clock;
     }
 
-    GLint OpenGLRendererPrivate::project(GLdouble objx, GLdouble objy, GLdouble objz, const GLdouble model[16], const GLdouble proj[16],
-                                         const GLint viewport[4], GLdouble *winx, GLdouble *winy, GLdouble *winz) {
+    [[maybe_unused]] GLint
+    OpenGLRendererPrivate::project(GLdouble objx, GLdouble objy, GLdouble objz, const GLdouble model[16], const GLdouble proj[16],
+                                   const GLint viewport[4], GLdouble *winx, GLdouble *winy, GLdouble *winz) {
         GLdouble in[4], out[4];
 
         in[0] = objx;
@@ -634,7 +944,7 @@ namespace AVQt {
     }
 
     void OpenGLRendererPrivate::transformPoint(GLdouble *out, const GLdouble *m, const GLdouble *in) {
-#define M(row, col)  m[col*4+row]
+#define M(row, col)  m[(col) * 4 + (row)]
         out[0] = M(0, 0) * in[0] + M(0, 1) * in[1] + M(0, 2) * in[2] + M(0, 3) * in[3];
         out[1] = M(1, 0) * in[0] + M(1, 1) * in[1] + M(1, 2) * in[2] + M(1, 3) * in[3];
         out[2] = M(2, 0) * in[0] + M(2, 1) * in[1] + M(2, 2) * in[2] + M(2, 3) * in[3];
diff --git a/AVQt/output/OpenGLRenderer.h b/AVQt/output/OpenGLRenderer.h
index a872577af3fb93025e832149b43dc68b52218841..5e367657d510e416f12716c2ec41a4fec22eb08c 100644
--- a/AVQt/output/OpenGLRenderer.h
+++ b/AVQt/output/OpenGLRenderer.h
@@ -7,6 +7,8 @@
 #include <QtOpenGL>
 #include <input/IFrameSource.h>
 
+#pragma clang diagnostic push
+#pragma ide diagnostic ignored "HidingNonVirtualFunction"
 extern "C" {
 #include <libavutil/rational.h>
 }
@@ -20,14 +22,14 @@ namespace AVQt {
 
     class RenderClock;
 
-    class OpenGLRenderer : public QOpenGLWindow, public IFrameSink {
+    class OpenGLRenderer : public QOpenGLWidget, public IFrameSink {
     Q_OBJECT
 //        Q_INTERFACES(AVQt::IFrameSink)
 
         Q_DECLARE_PRIVATE(AVQt::OpenGLRenderer)
 
     public:
-        explicit OpenGLRenderer(QWindow *parent = nullptr);
+        explicit OpenGLRenderer(QWidget *parent = nullptr);
 
         OpenGLRenderer(OpenGLRenderer &&other) noexcept;
 
@@ -35,7 +37,7 @@ namespace AVQt {
 
         void operator=(const OpenGLRenderer &) = delete;
 
-        ~OpenGLRenderer() noexcept;
+        ~OpenGLRenderer() noexcept Q_DECL_OVERRIDE;
 
         bool isPaused() Q_DECL_OVERRIDE;
 
@@ -92,7 +94,7 @@ namespace AVQt {
          * @param timebase Source stream time base, if you don't know what this means, you probably don't want to use it.
          * @param framerate Source stream framerate
          */
-        Q_INVOKABLE void onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration) Q_DECL_OVERRIDE;
+        Q_INVOKABLE void onFrame(IFrameSource *source, AVFrame *frame, int64_t frameDuration, AVBufferRef *pDeviceCtx) Q_DECL_OVERRIDE;
 
     signals:
 
@@ -111,9 +113,12 @@ namespace AVQt {
 
         void mouseReleaseEvent(QMouseEvent *event) Q_DECL_OVERRIDE;
 
+        void closeEvent(QCloseEvent *event) Q_DECL_OVERRIDE;
+
         OpenGLRendererPrivate *d_ptr;
     };
 }
 
 
-#endif //LIBAVQT_OPENGLRENDERER_H
\ No newline at end of file
+#endif //LIBAVQT_OPENGLRENDERER_H
+#pragma clang diagnostic pop
\ No newline at end of file
diff --git a/AVQt/output/private/Muxer_p.h b/AVQt/output/private/Muxer_p.h
index ca43c3509aa082d74f34d217019e48d850edd489..7aefc087e4a29b00bbe2f80c28231fd7e86529c9 100644
--- a/AVQt/output/private/Muxer_p.h
+++ b/AVQt/output/private/Muxer_p.h
@@ -28,8 +28,10 @@ namespace AVQt {
 
         QIODevice *m_outputDevice{nullptr};
 
+        Muxer::FORMAT m_format{Muxer::FORMAT::INVALID};
+
         QMutex m_initMutex{};
-        static constexpr size_t IOBUF_SIZE{4 * 1024};  // 4 KB
+        static constexpr int64_t IOBUF_SIZE{4 * 1024};  // 4 KiB
         uint8_t *m_pIOBuffer{nullptr};
         AVIOContext *m_pIOContext{nullptr};
         AVFormatContext *m_pFormatContext{nullptr};
diff --git a/AVQt/output/private/OpenGLRenderer_p.h b/AVQt/output/private/OpenGLRenderer_p.h
index 90a33dbb94c47b4376c6b5261452abcb876190b9..06d480c8b6e8bbde8bce7b2d3f67603250472b78 100644
--- a/AVQt/output/private/OpenGLRenderer_p.h
+++ b/AVQt/output/private/OpenGLRenderer_p.h
@@ -2,11 +2,24 @@
  * \private
  * \internal
  */
-#include <QtCore>
-#include <QtOpenGL>
 #include "../RenderClock.h"
 #include "../OpenGLRenderer.h"
 
+#include <QtCore>
+#include <QtOpenGL>
+
+#include <va/va.h>
+#include <va/va_x11.h>
+#include <va/va_drmcommon.h>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+
+extern "C" {
+#include <libavutil/hwcontext_vaapi.h>
+#include <libavutil/hwcontext.h>
+}
+
 
 #ifndef LIBAVQT_OPENGLRENDERER_P_H
 #define LIBAVQT_OPENGLRENDERER_P_H
@@ -32,7 +45,7 @@ namespace AVQt {
     private:
         explicit OpenGLRendererPrivate(OpenGLRenderer *q) : q_ptr(q) {};
 
-        static GLint
+        [[maybe_unused]] static GLint
         project(GLdouble objx, GLdouble objy, GLdouble objz, const GLdouble model[16], const GLdouble[16], const GLint viewport[4],
                 GLdouble *winx, GLdouble *winy, GLdouble *winz);
 
@@ -42,11 +55,11 @@ namespace AVQt {
 
         OpenGLRenderer *q_ptr{nullptr};
 
+        QMutex m_onFrameMutex{};
         QMutex m_renderQueueMutex{};
         QQueue<QPair<AVFrame *, int64_t>> m_renderQueue{};
 
         RenderClock *m_clock{nullptr};
-        int64_t m_currentFrameTimeout{1};
         QTime m_duration{};
         QTime m_position{};
         std::atomic_bool m_updateRequired{true}, m_paused{false}, m_running{false}, m_firstFrame{true};
@@ -56,6 +69,10 @@ namespace AVQt {
         QMutex m_currentFrameMutex{};
         AVFrame *m_currentFrame{nullptr};
 
+        AVBufferRef *m_pQSVDerivedDeviceContext{nullptr};
+        AVBufferRef *m_pQSVDerivedFramesContext{nullptr};
+
+        // OpenGL stuff
         QOpenGLVertexArrayObject m_vao{};
         QOpenGLBuffer m_vbo{}, m_ibo{};
         QOpenGLShaderProgram *m_program{nullptr};
@@ -64,6 +81,13 @@ namespace AVQt {
         static constexpr uint PROGRAM_VERTEX_ATTRIBUTE{0};
         static constexpr uint PROGRAM_TEXCOORD_ATTRIBUTE{1};
 
+        // VAAPI stuff
+        VADisplay m_VADisplay{nullptr};
+        AVVAAPIDeviceContext *m_pVAContext{nullptr};
+        EGLDisplay m_EGLDisplay{nullptr};
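+        // Index 0 holds the Y plane, index 1 the interleaved UV plane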
+        EGLImage m_EGLImages[2]{};
+        GLuint m_textures[2]{};
+
         friend class OpenGLRenderer;
     };
 }
diff --git a/Player/CMakeLists.txt b/Player/CMakeLists.txt
index 7b5e0e2d7235b717ebc42b1edd318b4aa02de322..e3fa22b28c5b5cf8832722c111f540ad1d17695f 100644
--- a/Player/CMakeLists.txt
+++ b/Player/CMakeLists.txt
@@ -8,7 +8,8 @@ set(CMAKE_CXX_STANDARD 20)
 find_package(Qt5 COMPONENTS Core Gui Widgets OpenGL Concurrent)
 find_package(OpenAL)
 
-#add_compile_options(-O4)
+#add_compile_options(-fsanitize=address)
+#add_link_options(-fsanitize=address)
 
 add_executable(Player main.cpp)
 
@@ -16,7 +17,7 @@ add_executable(Player main.cpp)
 target_link_libraries(Player Qt5::Core Qt5::Gui Qt5::Concurrent Qt5::Widgets Qt5::OpenGL avformat avfilter avutil avcodec avdevice swscale swresample AVQt)
 
 if (LINUX)
-    target_link_libraries(Player openal GL)
+    target_link_libraries(Player openal GL EGL GLU)
 elseif (WIN32)
     target_link_libraries(Player OpenAL32 opengl32)
-endif ()
+endif ()
\ No newline at end of file
diff --git a/Player/main.cpp b/Player/main.cpp
index 83656d8347d3ce3f95d26da4e4d02202162c9c3c..c966b4645ed93fdcea092eec9b7c67d97f9a26a9 100644
--- a/Player/main.cpp
+++ b/Player/main.cpp
@@ -9,7 +9,7 @@
 constexpr auto LOGFILE_LOCATION = "libAVQt.log";
 
 QApplication *app = nullptr;
-std::chrono::time_point<std::chrono::system_clock> start;
+std::chrono::time_point<std::chrono::system_clock> start; // NOLINT(cert-err58-cpp)
 
 void signalHandler(int sigNum) {
     Q_UNUSED(sigNum)
@@ -40,6 +40,7 @@ void messageHandler(QtMsgType type, const QMessageLogContext &context, const QSt
 }
 
 int main(int argc, char *argv[]) {
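+    // Qt requires AA_UseOpenGLES to be set before the QApplication instance is constructed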
+    QGuiApplication::setAttribute(Qt::AA_UseOpenGLES);
     app = new QApplication(argc, argv);
     signal(SIGINT, &signalHandler);
     signal(SIGTERM, &signalHandler);
@@ -63,17 +64,17 @@ int main(int argc, char *argv[]) {
     inputFile->open(QIODevice::ReadWrite);
 
     AVQt::Demuxer demuxer(inputFile);
-//    AVQt::AudioDecoder decoder;
-//    AVQt::OpenALAudioOutput output;
+    AVQt::AudioDecoder decoder;
+    AVQt::OpenALAudioOutput output;
 
-//    demuxer.registerCallback(&decoder, AVQt::IPacketSource::CB_AUDIO);
-//    decoder.registerCallback(&output);
+    demuxer.registerCallback(&decoder, AVQt::IPacketSource::CB_AUDIO);
+    decoder.registerCallback(&output);
 
     AVQt::IDecoder *videoDecoder;
     AVQt::IEncoder *videoEncoder;
 #ifdef Q_OS_LINUX
-    videoDecoder = new AVQt::DecoderVAAPI;
-    videoEncoder = new AVQt::EncoderVAAPI("hevc_vaapi");
+    videoDecoder = new AVQt::DecoderQSV;
+    videoEncoder = new AVQt::EncoderVAAPI(AVQt::IEncoder::CODEC::HEVC, 10 * 1000 * 1000);
 #elif defined(Q_OS_WINDOWS)
     videoDecoder = new AVQt::DecoderDXVA2();
 #else
@@ -84,12 +85,12 @@ int main(int argc, char *argv[]) {
     demuxer.registerCallback(videoDecoder, AVQt::IPacketSource::CB_VIDEO);
 //    videoDecoder->registerCallback(videoEncoder);
 
-//    QFile outputFile("output.mp4");
-//    outputFile.open(QIODevice::ReadWrite | QIODevice::Truncate);
-//    outputFile.seek(0);
-//    AVQt::Muxer muxer(&outputFile);
+    QFile outputFile("output.mp4");
+    outputFile.open(QIODevice::ReadWrite | QIODevice::Truncate);
+    outputFile.seek(0);
+    AVQt::Muxer muxer(&outputFile, AVQt::Muxer::FORMAT::MP4);
 
-//    videoEncoder->registerCallback(&muxer, AVQt::IPacketSource::CB_VIDEO);
+    videoEncoder->registerCallback(&muxer, AVQt::IPacketSource::CB_VIDEO);
     videoDecoder->registerCallback(&renderer);
 
     renderer.setMinimumSize(QSize(360, 240));
@@ -109,7 +110,7 @@ int main(int argc, char *argv[]) {
 
     QObject::connect(app, &QApplication::aboutToQuit, [&] {
         demuxer.deinit();
-//        delete videoEncoder;
+        delete videoEncoder;
         delete videoDecoder;
     });