
How can Qt receive a UDP video stream directly, without opening a file or using a URL, feed it to FFmpeg for decoding, and play the video in the QOpenGLWidget from the ui file? Complete code, please.

Below is a simple example that receives a UDP data stream with Qt, hands it to FFmpeg for decoding, and plays the video in a QOpenGLWidget.

main.cpp

#include "udpstreamreceiver.h"
#include "videoplayer.h"

#include <QApplication>
#include <QObject>

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);

    // Create the UDP stream receiver
    UdpStreamReceiver receiver;

    // Create the video player
    VideoPlayer player;

    // Connect the receiver's signal to the player's slot
    QObject::connect(&receiver, &UdpStreamReceiver::receivedData,
                     &player, &VideoPlayer::processVideoData);

    player.show();
    return a.exec();
}
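Note that both objects live in the main thread, so this connection resolves to a direct connection: all of the decoding work in processVideoData() runs on the GUI thread. That is fine for a demo, but for high-bitrate streams you may want to move the receiver and decoder to a worker thread.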

udpstreamreceiver.h

#ifndef UDPSTREAMRECEIVER_H
#define UDPSTREAMRECEIVER_H

#include <QUdpSocket>

class UdpStreamReceiver : public QObject
{
    Q_OBJECT
public:
    explicit UdpStreamReceiver(QObject *parent = nullptr);
signals:
    void receivedData(const QByteArray& data);
private slots:
    void readPendingDatagrams();
private:
    QUdpSocket m_udpSocket;
};

#endif // UDPSTREAMRECEIVER_H

udpstreamreceiver.cpp

#include "udpstreamreceiver.h"

UdpStreamReceiver::UdpStreamReceiver(QObject *parent) : QObject(parent)
{
    // Bind to port 12345 and start listening for UDP datagrams
    m_udpSocket.bind(12345);

    connect(&m_udpSocket, &QUdpSocket::readyRead,
            this, &UdpStreamReceiver::readPendingDatagrams);
}

void UdpStreamReceiver::readPendingDatagrams()
{
    while (m_udpSocket.hasPendingDatagrams()) {
        QByteArray datagram;
        datagram.resize(m_udpSocket.pendingDatagramSize());
        m_udpSocket.readDatagram(datagram.data(), datagram.size());

        // Forward the received bytes to whatever is connected
        emit receivedData(datagram);
    }
}
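With the receiver bound to port 12345, it can be exercised with the ffmpeg command-line tool, for example `ffmpeg -re -i input.mp4 -an -c:v libx264 -f h264 udp://127.0.0.1:12345` (where input.mp4 is any local test file). The `-f h264` output format produces a raw Annex-B byte stream, which is what the decoder below expects.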

videoplayer.h

#ifndef VIDEOPLAYER_H
#define VIDEOPLAYER_H

#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>

extern "C" {
#include <libavcodec/avcodec.h>
}

class VideoPlayer : public QOpenGLWidget, protected QOpenGLFunctions
{
    Q_OBJECT
public:
    explicit VideoPlayer(QWidget *parent = nullptr);
    void processVideoData(const QByteArray& data);
protected:
    void initializeGL() override;
    void resizeGL(int w, int h) override;
    void paintGL() override;
private:
    AVCodec* m_codec = nullptr;
    AVCodecContext* m_codecCtx = nullptr;
    AVFrame* m_frame = nullptr;
    uint8_t* m_frameBuffer = nullptr;

    QOpenGLShaderProgram m_shaderProg;

};

#endif // VIDEOPLAYER_H

videoplayer.cpp

#include "videoplayer.h"

VideoPlayer::VideoPlayer(QWidget *parent)
   : QOpenGLWidget(parent)
{
}

void VideoPlayer::processVideoData(const QByteArray &data)
{
     // 初始化FFmpeg解码器,只在第一帧数据接收时执行一次。
     if (!m_codec) {
        av_register_all();
        avcodec_register_all();

        m_codec = avcodec_find_decoder(AV_CODEC_ID_H264);
        if (!m_codec) {
            qCritical() << "Failed to find decoder for H264 codec";
            return;
        }

        m_codecCtx = avcodec_alloc_context3(m_codec);

        if (avcodec_open2(m_codecCtx, m_codec, nullptr) < 0) {
            qCritical() << "Failed to open codec";
            return;
        }

        m_frame = av_frame_alloc();
        if (!m_frame) {
            qCritical() << "Failed to allocate frame";
            return;
        }

        // 为解码后的图像分配内存
        int numBytes = av_image_get_buffer_size(
                            AV_PIX_FMT_RGB24,
                            m_codecCtx->width,
                            m_codecCtx->height,
                            1);

        m_frameBuffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));

        av_image_fill_arrays(
                    m_frame->data,
                    m_frame->linesize,
                    m_frameBuffer,
                    AV_PIX_FMT_RGB24,
                    m_codecCtx->width,
                    m_codecCtx->height,
                    1);
    }

    // 解码数据
    AVPacket packet;
    av_init_packet(&packet);
    packet.data = (uint8_t*)data.data();
    packet.size = data.size();

    int ret = avcodec_send_packet(m_codecCtx, &packet);
    if (ret < 0) {
        qCritical() << "Error sending a packet for decoding";
        return;
    }

     while (ret >= 0) {
         ret = avcodec_receive_frame(m_codecCtx, m_frame);
         if (ret == AVERROR(EAGAIN)) {
             break;
         } else if (ret < 0) {
             qCritical() << "Error during decoding";
             break;
         }

         update(); // 触发paintGL函数
     }
}

void VideoPlayer::initializeGL()
{
   initializeOpenGLFunctions();

   glClearColor(0.5f, 0.5f, 0.5f, 1.0f);

   // Compile and link the vertex and fragment shaders from the resource file.
   // Note: the QPainter-based paintGL() below never binds this program; it is
   // kept only as a starting point for a pure-OpenGL rendering path.
   m_shaderProg.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/vertex.glsl");
   m_shaderProg.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/fragment.glsl");
   m_shaderProg.link();
}

void VideoPlayer::resizeGL(int w, int h)
{
    glViewport(0, 0, w, h);
}

void VideoPlayer::paintGL()
{
    // Clear the screen
    glClear(GL_COLOR_BUFFER_BIT);

    // Nothing to draw until the first frame has been decoded
    if (!m_frame || !m_frameBuffer) {
        return;
    }

    // Convert the decoded frame (typically YUV) to RGB24.
    // sws_getCachedContext() reuses the context across repaints instead of
    // recreating (and leaking) it on every frame.
    m_swsCtx = sws_getCachedContext(
                m_swsCtx,
                m_codecCtx->width,
                m_codecCtx->height,
                m_codecCtx->pix_fmt,
                m_codecCtx->width,
                m_codecCtx->height,
                AV_PIX_FMT_RGB24,
                SWS_BILINEAR,
                nullptr, nullptr, nullptr);

    // sws_scale() expects arrays of plane pointers and line sizes; RGB24 is
    // a single packed plane of width * 3 bytes per row.
    uint8_t* dstData[4] = { m_frameBuffer, nullptr, nullptr, nullptr };
    int dstLinesize[4] = { 3 * m_codecCtx->width, 0, 0, 0 };

    sws_scale(
            m_swsCtx,
            m_frame->data,
            m_frame->linesize,
            0,
            m_codecCtx->height,
            dstData,
            dstLinesize);

    // Draw the image. RGB24 matches QImage::Format_RGB888 byte for byte,
    // so no channel swap is needed.
    QImage img(m_frameBuffer, m_codecCtx->width, m_codecCtx->height,
               3 * m_codecCtx->width, QImage::Format_RGB888);

    QPainter painter(this);
    painter.drawImage(rect(), img);
}
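The example above never releases its FFmpeg resources. A destructor along the following lines would clean them up; this is a minimal sketch, and it assumes a matching ~VideoPlayer() override; declaration is added to videoplayer.h:

VideoPlayer::~VideoPlayer()
{
    // All of these FFmpeg free functions accept null pointers, so this is
    // safe even if no data ever arrived and nothing was allocated.
    sws_freeContext(m_swsCtx);
    av_free(m_frameBuffer);
    av_frame_free(&m_frame);
    avcodec_free_context(&m_codecCtx);
}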

In addition, the Qt .pro file needs the following. Note that QUdpSocket lives in the network module, and the FFmpeg libraries have to be linked explicitly:

QT += network opengl widgets

# Adjust the paths if FFmpeg is not on the default search path, e.g. add
# INCLUDEPATH += /path/to/ffmpeg/include and LIBS += -L/path/to/ffmpeg/lib
LIBS += -lavcodec -lavutil -lswscale

SOURCES += \
    main.cpp \
    udpstreamreceiver.cpp \
    videoplayer.cpp

HEADERS += \
    udpstreamreceiver.h \
    videoplayer.h

RESOURCES += \
    shaders.qrc

Also create a shaders.qrc file in the project root so the vertex and fragment shaders are embedded in the application:

<!DOCTYPE RCC>
<RCC version="1.0">
  <qresource>
    <file>shaders/vertex.glsl</file>
    <file>shaders/fragment.glsl</file>
  </qresource>
</RCC>

Finally, write the two GLSL files (vertex.glsl and fragment.glsl) referenced above. They are not used by the QPainter drawing path in paintGL(), but they are compiled in initializeGL() as a starting point for pure-OpenGL rendering; as a placeholder they simply produce a red/blue checkerboard:

vertex.glsl

#version 330 core
layout (location = 0) in vec3 aPos;

void main()
{
   gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);
}

fragment.glsl

#version 330 core
out vec4 FragColor;
void main()
{
   if ((int(gl_FragCoord.x) + int(gl_FragCoord.y)) % 2 == 0)
       FragColor = vec4(1.0f, 0.5f, 0.5f, 1.0f);
   else
       FragColor = vec4(0.5f, 0.5f, 1.0f, 1.0f);
}

That completes a simple example of receiving a UDP data stream in Qt, decoding it with FFmpeg, and playing the video in a QOpenGLWidget.
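One caveat worth spelling out: the code above assumes that every UDP datagram carries complete H.264 NAL units in Annex-B format. If the sender fragments NAL units across datagrams, or the datagram carrying the SPS/PPS parameter sets is lost, the decoder prints errors such as "non-existing PPS 0 referenced" and produces no frames. A more robust receiver runs the incoming bytes through an H.264 parser so the decoder only ever sees complete packets. The following is a minimal sketch, not part of the code above; it assumes an extra member m_parser (an AVCodecParserContext*) and that the decoder is initialized up front, e.g. in the constructor, rather than lazily on the first datagram:

// Sketch: split an arbitrary incoming byte stream into complete H.264
// packets before decoding. Assumed setup (e.g. in the constructor):
//     m_parser = av_parser_init(AV_CODEC_ID_H264);
void VideoPlayer::parseAndDecode(const QByteArray& data)
{
    const uint8_t* in = (const uint8_t*)data.constData();
    int inSize = data.size();

    while (inSize > 0) {
        uint8_t* outData = nullptr;
        int outSize = 0;

        // Consumes some input bytes; outData/outSize are set once a complete
        // packet has been assembled (the parser buffers partial NAL units).
        int consumed = av_parser_parse2(m_parser, m_codecCtx,
                                        &outData, &outSize,
                                        in, inSize,
                                        AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
        if (consumed < 0)
            break;
        in += consumed;
        inSize -= consumed;

        if (outSize > 0) {
            // Feed the complete packet through the same decode path as above.
            processVideoData(QByteArray((const char*)outData, outSize));
        }
    }
}

av_parser_close(m_parser) would then belong in the destructor alongside the other cleanup.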

Comments
  • liuhao
    2023-09-26 18:00:42
    Hi, a question: receiving UDP data the way you describe, I get the errors below. What is going on?
    [IMGUTILS @ 0000005fcf5fb040] Picture size 0x0 is invalid
    [IMGUTILS @ 0000005fcf5fb080] Picture size 0x0 is invalid
    [h264 @ 0000020ed6240880] non-existing PPS 0 referenced
    [h264 @ 0000020ed6240880] decode_slice_header error
    [h264 @ 0000020ed6240880] no frame!

联系我们

在线咨询: 点击这里给我发消息

微信号:3007537140

上班时间: 10:30-22:30

关注我们
x

注册

已经有帐号?