Skip to content
Snippets Groups Projects
Verified Commit 33f027b0 authored by Silas Della Contrada's avatar Silas Della Contrada
Browse files

Completed Muxer (I guess) and disabled B-frames in VAAPIEncoderImpl

parent 256696ae
Branches
No related tags found
1 merge request!17Draft: Resolve "Migrate frame passing to libProcessingGraph"
......@@ -27,8 +27,14 @@
#include "communication/PacketPadParams.hpp"
#include "communication/VideoPadParams.hpp"
#include "common/ContainerFormat.hpp"
#include "common/PixelFormat.hpp"
#include "common/Platform.hpp"
#include "input/Demuxer.hpp"
#include "output/Muxer.hpp"
#include "AVQt/decoder/VideoDecoder.hpp"
#include "AVQt/encoder/VideoEncoder.hpp"
#include "decoder/IVideoDecoderImpl.hpp"
......
......@@ -13,6 +13,8 @@
#include <pgraph/api/Data.hpp>
#include <pgraph/impl/SimpleConsumer.hpp>
#include <pgraph_network/api/PadRegistry.hpp>
#include <QThread>
extern "C" {
......@@ -39,7 +41,7 @@ namespace AVQt {
std::unique_ptr<QIODevice> outputDevice;
};
explicit Muxer(Config config, QObject *parent = nullptr);
explicit Muxer(Config config, std::shared_ptr<pgraph::network::api::PadRegistry> padRegistry, QObject *parent = nullptr);
~Muxer() Q_DECL_OVERRIDE;
[[nodiscard]] bool isOpen() const Q_DECL_OVERRIDE;
......@@ -47,7 +49,7 @@ namespace AVQt {
[[nodiscard]] bool isPaused() const Q_DECL_OVERRIDE;
bool init() Q_DECL_OVERRIDE;
[[maybe_unused]] int64_t createStreamPad(const std::shared_ptr<communication::PacketPadParams> &padData);
[[maybe_unused]] int64_t createStreamPad();
[[maybe_unused]] void destroyStreamPad(int64_t padId);
void consume(int64_t pad, std::shared_ptr<pgraph::api::Data> data) override;
......@@ -67,7 +69,7 @@ namespace AVQt {
void paused(bool state) Q_DECL_OVERRIDE;
protected:
[[maybe_unused]] explicit Muxer(Config config, MuxerPrivate *p, QObject *parent = nullptr);
[[maybe_unused]] explicit Muxer(Config config, std::shared_ptr<pgraph::network::api::PadRegistry> padRegistry, MuxerPrivate *p, QObject *parent = nullptr);
QScopedPointer<MuxerPrivate> d_ptr;
};
}// namespace AVQt
......
......@@ -222,7 +222,7 @@ namespace AVQt {
d->codecContext->width = params.frameSize.width();
d->codecContext->height = params.frameSize.height();
d->codecContext->max_b_frames = 0;
d->codecContext->gop_size = 100;
d->codecContext->gop_size = 20;
d->codecContext->time_base = {1, 1000000};// microseconds
d->codecContext->hw_device_ctx = av_buffer_ref(d->hwDeviceContext.get());
d->codecContext->hw_frames_ctx = av_buffer_ref(d->hwFramesContext.get());
......@@ -291,6 +291,9 @@ namespace AVQt {
}
}
frame->pts = av_rescale_q(frame->pts, {1, 1000000}, d->codecContext->time_base);
// qWarning() << "Frame pts:" << frame->pts;
{
QMutexLocker codecLocker(&d->codecMutex);
auto t1 = std::chrono::high_resolution_clock::now();
......@@ -306,9 +309,9 @@ namespace AVQt {
}
static size_t frameCount = 0;
if (frameCount % 100 == 0) {
qWarning("Encoded frame #%04zu", frameCount);
}
// if (frameCount % 100 == 0) {
qWarning("Encoded frame #%04zu with pts %ld", frameCount, frame->pts);
// }
++frameCount;
d->firstFrame = false;
......@@ -610,11 +613,15 @@ namespace AVQt {
ret = avcodec_receive_packet(p->codecContext.get(), packet.get());
}
av_packet_rescale_ts(packet.get(), p->codecContext->time_base, {1, 1000000});
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
msleep(1);
} else if (ret < 0) {
qWarning() << "Could not receive packet:" << av_make_error_string(strBuf, sizeof(strBuf), ret);
} else {
static size_t packetCount = 0;
qWarning("Got packet #%04zu with pts %ld, dts %ld", packetCount++, packet->pts, packet->dts);
p->q_func()->packetReady(packet);
}
}
......
......@@ -54,7 +54,7 @@ namespace AVQt {
[[nodiscard]] QVector<AVPixelFormat> getInputFormats() const override;
[[nodiscard]] std::shared_ptr<AVCodecParameters> getCodecParameters() const override;
[[nodiscard]] std::shared_ptr<communication::PacketPadParams> getPacketPadParams() const;
[[nodiscard]] std::shared_ptr<communication::PacketPadParams> getPacketPadParams() const override;
signals:
void packetReady(std::shared_ptr<AVPacket> packet) override;
......
......@@ -11,17 +11,26 @@
#include <pgraph/api/PadUserData.hpp>
#include <pgraph_network/impl/RegisteringPadFactory.hpp>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
}
namespace AVQt {
Muxer::Muxer(Config config, QObject *parent) : QThread(parent), d_ptr(new MuxerPrivate(this)) {
Muxer::Muxer(Config config, std::shared_ptr<pgraph::network::api::PadRegistry> padRegistry, QObject *parent)
: QThread(parent),
SimpleConsumer(pgraph::network::impl::RegisteringPadFactory::factoryFor(padRegistry)),
d_ptr(new MuxerPrivate(this)) {
Q_D(Muxer);
d->init(std::move(config));
}
Muxer::Muxer(Config config, MuxerPrivate *p, QObject *parent) : QThread(parent), d_ptr(p) {
Muxer::Muxer(Config config, std::shared_ptr<pgraph::network::api::PadRegistry> padRegistry, MuxerPrivate *p, QObject *parent)
: QThread(parent),
SimpleConsumer(pgraph::network::impl::RegisteringPadFactory::factoryFor(padRegistry)),
d_ptr(p) {
Q_D(Muxer);
d->init(std::move(config));
}
......@@ -45,27 +54,22 @@ namespace AVQt {
return;
}
pFormatCtx.reset(avformat_alloc_context());
AVFormatContext *formatContext = avformat_alloc_context();
int ret = avformat_alloc_output_context2(&formatContext, pOutputFormat, config.containerFormat, nullptr);
if (ret < 0) {
char strBuf[AV_ERROR_MAX_STRING_SIZE];
qWarning() << "[Muxer] Could not allocate output context: " << av_make_error_string(strBuf, AV_ERROR_MAX_STRING_SIZE, ret);
avformat_free_context(formatContext);
return;
}
pFormatCtx.reset(formatContext);
pBuffer = static_cast<uint8_t *>(av_malloc(MuxerPrivate::BUFFER_SIZE));
pIOCtx.reset(avio_alloc_context(pBuffer, MuxerPrivate::BUFFER_SIZE, AVIO_FLAG_WRITE, this, nullptr, MuxerPrivate::writeIO, MuxerPrivate::seekIO));
pFormatCtx->pb = pIOCtx.get();
pFormatCtx->flags |= AVFMT_FLAG_CUSTOM_IO;
pFormatCtx->oformat = const_cast<AVOutputFormat *>(pOutputFormat);
int ret = avformat_init_output(pFormatCtx.get(), nullptr);
if (ret < 0) {
char strBuf[AV_ERROR_MAX_STRING_SIZE];
qWarning() << "[Muxer] Could not init output:" << av_make_error_string(strBuf, AV_ERROR_MAX_STRING_SIZE, ret);
goto fail;
}
return;
fail:
qWarning() << "[Muxer] Could not initialize muxer";
av_free(pIOCtx->buffer);
pIOCtx.reset();
pFormatCtx.reset();
pBuffer = nullptr;
}
Muxer::~Muxer() {
......@@ -74,14 +78,6 @@ namespace AVQt {
if (d->running) {
Muxer::stop();
}
if (d->pBuffer) {
av_free(d->pBuffer);
}
d->pFormatCtx.reset();
d->pIOCtx.reset();
d->outputDevice->close();
}
bool Muxer::init() {
......@@ -95,7 +91,13 @@ namespace AVQt {
}
void Muxer::close() {
// Empty
Q_D(Muxer);
printf("[Muxer] Destroying Muxer\n");
av_write_trailer(d->pFormatCtx.get());
d->pFormatCtx.reset();
d->pIOCtx.reset();
d->outputDevice->close();
}
bool Muxer::start() {
......@@ -107,6 +109,7 @@ namespace AVQt {
if (ret < 0) {
char strBuf[AV_ERROR_MAX_STRING_SIZE];
qWarning() << "[Muxer] Could not write header:" << av_make_error_string(strBuf, AV_ERROR_MAX_STRING_SIZE, ret);
d->running = false;
return false;
}
d->paused = false;
......@@ -153,7 +156,7 @@ namespace AVQt {
auto msg = std::dynamic_pointer_cast<communication::Message>(data);
switch (msg->getAction()) {
case communication::Message::Action::INIT: {
if (!d->initStream(pad, msg->getPayload("packetParams").value<std::shared_ptr<communication::PacketPadParams>>())) {
if (!d->initStream(pad, msg->getPayload("packetParams").value<std::shared_ptr<const communication::PacketPadParams>>())) {
qWarning() << "[Muxer] failed to open stream" << pad;
}
break;
......@@ -179,14 +182,18 @@ namespace AVQt {
d->enqueueData(packet);
break;
}
case communication::Message::Action::CLEANUP: {
close();
break;
}
// TODO: reset
default:// CLEANUP will be handled by the destructor
default:
break;
}
}
}
[[maybe_unused]] int64_t Muxer::createStreamPad(const std::shared_ptr<communication::PacketPadParams> &padData) {
[[maybe_unused]] int64_t Muxer::createStreamPad() {
Q_D(Muxer);
if (d->running) {
......@@ -194,7 +201,7 @@ namespace AVQt {
return pgraph::api::INVALID_PAD_ID;
}
auto padId = pgraph::impl::SimpleConsumer::createInputPad(padData);
auto padId = pgraph::impl::SimpleConsumer::createInputPad(pgraph::api::PadUserData::emptyUserData());
if (padId == pgraph::api::INVALID_PAD_ID) {
qWarning() << "[Muxer] failed to create pad";
return pgraph::api::INVALID_PAD_ID;
......@@ -265,6 +272,9 @@ namespace AVQt {
lock.unlock();
av_packet_rescale_ts(packet.get(), {1, 1000000}, d->pFormatCtx->streams[packet->stream_index]->time_base);
qWarning() << "Muxing packet pts:" << packet->pts << "dts:" << packet->dts;
int ret = av_interleaved_write_frame(d->pFormatCtx.get(), packet.get());
if (ret == AVERROR(EAGAIN)) {
continue;
......@@ -289,9 +299,7 @@ namespace AVQt {
void MuxerPrivate::destroyAVIOContext(AVIOContext *ctx) {
    // Deleter for the custom AVIOContext created via avio_alloc_context().
    // avio_closep() must NOT be used here: it is only valid for contexts opened
    // with avio_open() and would treat our opaque pointer (the Muxer) as a
    // URLContext. For custom IO, free the IO buffer (taken from ctx->buffer,
    // since FFmpeg may have reallocated it internally) and then the context.
    // NOTE(review): ensure no other code path frees this buffer as well
    // (e.g. an error path freeing pBuffer) — that would double-free.
    if (ctx) {
        av_freep(&ctx->buffer);
        avio_context_free(&ctx);
    }
}
......@@ -342,7 +350,7 @@ namespace AVQt {
return result ? d->outputDevice->pos() : AVERROR_UNKNOWN;
}
bool MuxerPrivate::initStream(int64_t padId, const std::shared_ptr<communication::PacketPadParams> &params) {
bool MuxerPrivate::initStream(int64_t padId, const std::shared_ptr<const communication::PacketPadParams> &params) {
Q_Q(Muxer);
if (streams.find(padId) != streams.end() && streams[padId] != nullptr) {
......@@ -355,6 +363,13 @@ namespace AVQt {
qWarning() << "[Muxer] failed to create new stream";
return false;
}
stream->codecpar = avcodec_parameters_alloc();
if (!stream->codecpar) {
qWarning() << "[Muxer] failed to allocate codec parameters";
return false;
}
avcodec_parameters_copy(stream->codecpar, params->codecParams.get());
stream->time_base = {1, 1000000};// Microseconds
streams[padId] = stream;
......
......@@ -35,7 +35,7 @@ namespace AVQt {
void init(AVQt::Muxer::Config config);
bool initStream(int64_t padId, const std::shared_ptr<communication::PacketPadParams> &params);
bool initStream(int64_t padId, const std::shared_ptr<const communication::PacketPadParams> &params);
bool startStream(int64_t padId);
void stopStream(int64_t padId);
......
......@@ -124,7 +124,7 @@ int main(int argc, char *argv[]) {
encodeParams.bitrate = 10000000;
// encodeParams.codec = AVQt::Codec::H264;
AVQt::VideoEncoder::Config encoderConfig;
encoderConfig.codec = AVQt::Codec::H264;
encoderConfig.codec = AVQt::Codec::VP9;
encoderConfig.encodeParameters = encodeParams;
encoderConfig.encoderPriority << "VAAPI";
......@@ -133,8 +133,16 @@ int main(int argc, char *argv[]) {
demuxerConfig.inputDevice->open(QIODevice::ReadWrite);
demuxerConfig.loop = true;
auto *buffer = new QFile("output.ts");
buffer->open(QIODevice::WriteOnly);
auto demuxer = std::make_shared<AVQt::Demuxer>(std::move(demuxerConfig), registry);
AVQt::Muxer::Config muxerConfig{};
muxerConfig.outputDevice = std::unique_ptr<QIODevice>(buffer);
muxerConfig.containerFormat = AVQt::common::ContainerFormat::MKV;
auto muxer = std::make_shared<AVQt::Muxer>(std::move(muxerConfig), registry);
AVQt::VideoDecoder::Config videoDecoderConfig{};
videoDecoderConfig.decoderPriority << "VAAPI";
auto decoder1 = std::make_shared<AVQt::VideoDecoder>(videoDecoderConfig, registry);
......@@ -150,113 +158,141 @@ int main(int argc, char *argv[]) {
// auto decImpl = AVQt::VideoDecoderFactory::getInstance().create({AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE}, AV_CODEC_ID_H264, {"V4L2"});
// qDebug() << "decImpl" << decImpl.get();
demuxer->init();
// transcoder->init();
decoder1->init();
decoder2->init();
// decoder3->init();
encoder->init();
renderer1->init();
// renderer2->init();
yuvrgbconverter->init();
// frameSaver->init();
cc->init();
// cc2->init();
// pgraph::network::data::APIInfo apiInfo(registry);
// std::cout << QJsonDocument::fromJson(QByteArray::fromStdString(apiInfo.toString())).toJson(QJsonDocument::Indented).toStdString() << std::endl;
std::shared_ptr<pgraph::api::Pad> demuxerOutPad{};
auto demuxerPads = demuxer->getOutputPads();
for (const auto &pad : demuxerPads) {
if (pad.second->getUserData()->getType() == AVQt::communication::PacketPadParams::Type) {
const auto padParams = std::dynamic_pointer_cast<const AVQt::communication::PacketPadParams>(pad.second->getUserData());
if (padParams->mediaType == AVMEDIA_TYPE_VIDEO) {
demuxerOutPad = pad.second;
break;
{
demuxer->init();
muxer->init();
// transcoder->init();
decoder1->init();
decoder2->init();
// decoder3->init();
encoder->init();
renderer1->init();
// renderer2->init();
yuvrgbconverter->init();
// frameSaver->init();
cc->init();
// cc2->init();
// pgraph::network::data::APIInfo apiInfo(registry);
// std::cout << QJsonDocument::fromJson(QByteArray::fromStdString(apiInfo.toString())).toJson(QJsonDocument::Indented).toStdString() << std::endl;
std::shared_ptr<pgraph::api::Pad> demuxerOutPad{};
auto demuxerPads = demuxer->getOutputPads();
for (const auto &pad : demuxerPads) {
if (pad.second->getUserData()->getType() == AVQt::communication::PacketPadParams::Type) {
const auto padParams = std::dynamic_pointer_cast<const AVQt::communication::PacketPadParams>(pad.second->getUserData());
if (padParams->mediaType == AVMEDIA_TYPE_VIDEO) {
demuxerOutPad = pad.second;
break;
}
}
}
auto decoder1InPad = decoder1->getInputPads().begin()->second;
auto decoder1OutPad = decoder1->getOutputPads().begin()->second;
auto decoder2InPad = decoder2->getInputPads().begin()->second;
auto decoder2OutPad = decoder2->getOutputPads().begin()->second;
// auto decoder3InPad = decoder3->getInputPads().begin()->second;
// auto decoder3OutPad = decoder3->getOutputPads().begin()->second;
auto encoderInPad = encoder->getInputPads().begin()->second;
auto encoderOutPad = encoder->getOutputPads().begin()->second;
auto renderer1InPad = renderer1->getInputPads().begin()->second;
// auto renderer2InPad = renderer2->getInputPads().begin()->second;
auto ccInPad = cc->getInputPads().begin()->second;
// auto cc2InPad = cc2->getInputPads().begin()->second;
auto yuvrgbconverterInPad = yuvrgbconverter->getInputPads().begin()->second;
auto yuvrgbconverterOutPad = yuvrgbconverter->getOutputPads().begin()->second;
// auto frameSaverInPad = frameSaver->getInputPads().begin()->second;
auto muxerInPadId = muxer->createStreamPad();
auto muxerInPad = muxer->getInputPad(muxerInPadId);
// cc2InPad->link(decoderOutPad);
// decoder1InPad->link(transcoderPacketOutPad);
// encoderInPad->link(decoder1OutPad);
// decoder2InPad->link(encoderOutPad);
decoder1InPad->link(demuxerOutPad);
// ccInPad->link(decoder1OutPad);
encoderInPad->link(decoder1OutPad);
// yuvrgbconverterInPad->link(decoder1OutPad);
// renderer2InPad->link(decoder1OutPad);
renderer1InPad->link(decoder1OutPad);
// decoder1OutPad->link(yuvrgbconverterInPad);
// renderer1InPad->link(decoder1OutPad);
// renderer2InPad->link(decoder2OutPad);
// yuvrgbconverterOutPad->link(frameSaverInPad);
muxerInPad->link(encoderOutPad);
demuxer->open();
renderer1->resize(1280, 720);
// renderer2->resize(1280, 720);
demuxer->start();
QTimer::singleShot(5000, [demuxer]() {
QApplication::quit();
});
// QTimer::singleShot(4000, [demuxer]{
// demuxer->pause(true);
// QTimer::singleShot(4000, [demuxer]{
// demuxer->pause(false);
// QTimer::singleShot(15000, [renderer1, renderer2] {
// renderer1->close();
// renderer2->close();
// });
// });
// });
// AVQt::api::IDesktopCaptureImpl::Config config{};
// config.fps = 1;
// config.sourceClass = AVQt::api::IDesktopCaptureImpl::SourceClass::Screen;
// auto capturer = std::make_shared<AVQt::DesktopCapturer>(config, registry);
// auto cc = std::make_shared<CommandConsumer>(registry);
//
// capturer->init();
// cc->init();
//
// auto ccInPad = cc->getInputPads().begin()->second;
// auto capturerOutPad = capturer->getOutputPads().begin()->second;
//
// ccInPad->link(capturerOutPad);
//
// capturer->open();
// capturer->start();
//
QObject::connect(app, &QApplication::aboutToQuit, [demuxer /*, buffer, &muxer, &decoder1, &renderer1, &encoder*/] {
demuxer->close();
// demuxer.reset();
// decoder1.reset();
// renderer1.reset();
// encoder.reset();
// muxer.reset();
// std::cout << "Buffer size: " << buffer->size() << std::endl;
// QFile output("output.ts");
// output.open(QIODevice::WriteOnly);
// output.write(buffer->data(), buffer->size());
// output.close();
// delete buffer;
});
//
// QTimer::singleShot(15000, [] {
// QApplication::quit();
// });
}
auto decoder1InPad = decoder1->getInputPads().begin()->second;
auto decoder1OutPad = decoder1->getOutputPads().begin()->second;
auto decoder2InPad = decoder2->getInputPads().begin()->second;
auto decoder2OutPad = decoder2->getOutputPads().begin()->second;
// auto decoder3InPad = decoder3->getInputPads().begin()->second;
// auto decoder3OutPad = decoder3->getOutputPads().begin()->second;
auto encoderInPad = encoder->getInputPads().begin()->second;
auto encoderOutPad = encoder->getOutputPads().begin()->second;
auto renderer1InPad = renderer1->getInputPads().begin()->second;
// auto renderer2InPad = renderer2->getInputPads().begin()->second;
auto ccInPad = cc->getInputPads().begin()->second;
// auto cc2InPad = cc2->getInputPads().begin()->second;
auto yuvrgbconverterInPad = yuvrgbconverter->getInputPads().begin()->second;
auto yuvrgbconverterOutPad = yuvrgbconverter->getOutputPads().begin()->second;
// auto frameSaverInPad = frameSaver->getInputPads().begin()->second;
// cc2InPad->link(decoderOutPad);
// decoder1InPad->link(transcoderPacketOutPad);
// encoderInPad->link(decoder1OutPad);
// decoder2InPad->link(encoderOutPad);
decoder1InPad->link(demuxerOutPad);
// ccInPad->link(decoder1OutPad);
encoderInPad->link(decoder1OutPad);
// decoder2InPad->link(encoderOutPad);
// yuvrgbconverterInPad->link(decoder1OutPad);
// renderer2InPad->link(decoder1OutPad);
renderer1InPad->link(decoder1OutPad);
// decoder1OutPad->link(yuvrgbconverterInPad);
// renderer1InPad->link(decoder1OutPad);
// renderer2InPad->link(decoder2OutPad);
// yuvrgbconverterOutPad->link(frameSaverInPad);
demuxer->open();
renderer1->resize(1280, 720);
// renderer2->resize(1280, 720);
demuxer->start();
// QTimer::singleShot(5000, [demuxer]() {
// QApplication::quit();
// });
// QTimer::singleShot(4000, [demuxer]{
// demuxer->pause(true);
// QTimer::singleShot(4000, [demuxer]{
// demuxer->pause(false);
// QTimer::singleShot(15000, [renderer1, renderer2] {
// renderer1->close();
// renderer2->close();
// });
// });
// });
// AVQt::api::IDesktopCaptureImpl::Config config{};
// config.fps = 1;
// config.sourceClass = AVQt::api::IDesktopCaptureImpl::SourceClass::Screen;
// auto capturer = std::make_shared<AVQt::DesktopCapturer>(config, registry);
// auto cc = std::make_shared<CommandConsumer>(registry);
//
// capturer->init();
// cc->init();
//
// auto ccInPad = cc->getInputPads().begin()->second;
// auto capturerOutPad = capturer->getOutputPads().begin()->second;
//
// ccInPad->link(capturerOutPad);
//
// capturer->open();
// capturer->start();
//
QObject::connect(app, &QApplication::aboutToQuit, [demuxer] {
demuxer->close();
});
//
// QTimer::singleShot(15000, [] {
// QApplication::quit();
// });
return QApplication::exec();
int ret = QApplication::exec();
demuxer.reset();
decoder1.reset();
decoder2.reset();
// decoder3.reset();
encoder.reset();
renderer1.reset();
// renderer2.reset();
muxer.reset();
return ret;
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment