加载中…
个人资料
  • 博客等级:
  • 博客积分:
  • 博客访问:
  • 关注人气:
  • 获赠金笔:0支
  • 赠出金笔:0支
  • 荣誉徽章:
正文 字体大小:

Qt实现播放H264视频

(2019-12-01 11:00:09)
工具:
   Qt 5.5 、ffmpeg version N-95183-g97450d2b6a

本文参考:
#include "qmyimagethread.h"

// Construct the decode thread. No work happens here; decoding begins
// when the owner calls start(), which invokes run() on the new thread.
QMyImageThread::QMyImageThread(QObject *parent)
    : QThread(parent)
{
    // All decoder state is file-scoped and initialized inside run().
}

// Destructor. The thread owns no resources directly; everything allocated
// in run() is released before run() returns.
QMyImageThread::~QMyImageThread()
{
    // Intentionally empty.
}

extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavutil/pixfmt.h"
#include "libavutil/imgutils.h"
}

// H.264 NAL unit types: the value of the 5-bit nal_unit_type field
// extracted in GetAnnexbNALU() (nalu->buf[0] & 0x1f).
typedef enum {
    NALU_TYPE_SLICE    = 1,   // coded slice (non-IDR picture)
    NALU_TYPE_DPA      = 2,   // slice data partition A
    NALU_TYPE_DPB      = 3,   // slice data partition B
    NALU_TYPE_DPC      = 4,   // slice data partition C
    NALU_TYPE_IDR      = 5,   // coded slice of an IDR picture (keyframe)
    NALU_TYPE_SEI      = 6,   // supplemental enhancement information
    NALU_TYPE_SPS      = 7,   // sequence parameter set
    NALU_TYPE_PPS      = 8,   // picture parameter set
    NALU_TYPE_AUD      = 9,   // access unit delimiter
    NALU_TYPE_EOSEQ    = 10,  // end of sequence
    NALU_TYPE_EOSTREAM = 11,  // end of stream
    NALU_TYPE_FILL     = 12,  // filler data
} NaluType;

// H.264 nal_ref_idc priorities: the 2-bit field extracted in
// GetAnnexbNALU() (nalu->buf[0] & 0x60).
typedef enum {
    NALU_PRIORITY_DISPOSABLE = 0,
    NALU_PRIRITY_LOW         = 1,  // NOTE(review): misspelled ("PRIRITY"); kept as-is for source compatibility
    NALU_PRIORITY_HIGH       = 2,
    NALU_PRIORITY_HIGHEST    = 3
} NaluPriority;


// One parsed Annex B NAL unit. Filled in by GetAnnexbNALU(); the start code
// itself is stripped, only the payload lands in buf.
typedef struct
{
    int startcodeprefix_len;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
    unsigned len;                 //! Length of the NAL unit (Excluding the start code, which does not belong to the NALU)
    unsigned max_size;            //! Nal Unit Buffer size
    int forbidden_bit;            //! should be always FALSE
    int nal_reference_idc;        //! NALU_PRIORITY_xxxx
    int nal_unit_type;            //! NALU_TYPE_xxxx
    char *buf;                    //! contains the first byte followed by the EBSP
} NALU_t;

FILE *h264bitstream = NULL;                //!< the bit stream file; opened in QMyImageThread::run(), read by GetAnnexbNALU()

// Scratch flags shared with GetAnnexbNALU():
// info2 = a 3-byte start code (00 00 01) was found; info3 = a 4-byte one (00 00 00 01).
int info2=0, info3=0;

// Test for the 3-byte Annex B start code 0x00 0x00 0x01 at Buf.
// Returns 1 when present, 0 otherwise.
static int FindStartCode2 (unsigned char *Buf){
    return (Buf[0] == 0 && Buf[1] == 0 && Buf[2] == 1) ? 1 : 0;
}

// Test for the 4-byte Annex B start code 0x00 0x00 0x00 0x01 at Buf.
// Returns 1 when present, 0 otherwise.
static int FindStartCode3 (unsigned char *Buf){
    return (Buf[0] == 0 && Buf[1] == 0 && Buf[2] == 0 && Buf[3] == 1) ? 1 : 0;
}

// 从H264码流中获取NALU
int GetAnnexbNALU (NALU_t *nalu){
    int pos = 0;
    int StartCodeFound, rewind;
    unsigned char *Buf;

    if ((Buf = (unsigned char*)calloc (nalu->max_size , sizeof(char))) == NULL)
        printf ("GetAnnexbNALU: Could not allocate Buf memory\n");

    nalu->startcodeprefix_len=3;

    if (3 != fread (Buf, 1, 3, h264bitstream)){
        free(Buf);
        return 0;
    }
    info2 = FindStartCode2 (Buf);
    if(info2 != 1) {
        if(1 != fread(Buf+3, 1, 1, h264bitstream)){
            free(Buf);
            return 0;
        }
        info3 = FindStartCode3 (Buf);
        if (info3 != 1){
            free(Buf);
            return -1;
        }
        else {
            pos = 4;
            nalu->startcodeprefix_len = 4;
        }
    }
    else{
        nalu->startcodeprefix_len = 3;
        pos = 3;
    }
    StartCodeFound = 0;
    info2 = 0;
    info3 = 0;

    while (!StartCodeFound){
        if (feof (h264bitstream)){
            nalu->len = (pos-1)-nalu->startcodeprefix_len;
            memcpy (nalu->buf, &Buf[nalu->startcodeprefix_len], nalu->len);    
            nalu->forbidden_bit = nalu->buf[0] & 0x80; //1 bit
            nalu->nal_reference_idc = nalu->buf[0] & 0x60; // 2 bit
            nalu->nal_unit_type = (nalu->buf[0]) & 0x1f;// 5 bit
            free(Buf);
            return pos-1;
        }
        Buf[pos++] = fgetc (h264bitstream);
        info3 = FindStartCode3(&Buf[pos-4]);
        if(info3 != 1)
            info2 = FindStartCode2(&Buf[pos-3]);
        StartCodeFound = (info2 == 1 || info3 == 1);
    }

    // Here, we have found another start code (and read length of startcode bytes more than we should
    // have.  Hence, go back in the file
    rewind = (info3 == 1)? -4 : -3;

    if (0 != fseek (h264bitstream, rewind, SEEK_CUR)){
        free(Buf);
        printf("GetAnnexbNALU: Cannot fseek in the bit stream file");
    }

    // Here the Start code, the complete NALU, and the next start code is in the Buf. 
    // The size of Buf is pos, pos+rewind are the number of bytes excluding the next
    // start code, and (pos+rewind)-startcodeprefix_len is the size of the NALU excluding the start code

    nalu->len = (pos+rewind)-nalu->startcodeprefix_len;
    memcpy (nalu->buf, &Buf[nalu->startcodeprefix_len], nalu->len);//
    nalu->forbidden_bit = nalu->buf[0] & 0x80; //1 bit
    nalu->nal_reference_idc = nalu->buf[0] & 0x60; // 2 bit
    nalu->nal_unit_type = (nalu->buf[0]) & 0x1f;// 5 bit
    free(Buf);

    return (pos+rewind);
}

// Decoder state shared between decoder_Init(), decodeH264() and run().
AVFrame *pFrame = NULL;        // decoded frame (decoder's native pixel format)
AVFrame *pFrameRGB = NULL;     // RGB32 conversion target (filled via avpicture_fill)
//AVFormatContext *
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodec = NULL;
AVFormatContext *pFormatCtx = 0;
int nVideoStreamIndex = -1;    // index of the video stream inside pFormatCtx
int m_nFrameRate = 0;          // derived in decodeH264() as time_base.den / time_base.num
int decoder_Init()
{
    av_register_all();

    pFormatCtx = avformat_alloc_context();
    if(0 == pFormatCtx)
    {
        return 1;
    }

    int n = avformat_open_input(&pFormatCtx,"ds.h264", 0, 0);
    if(0 != n)
    {
        //std::cout << "avformat_open_input failed \n";
        return 1;
    }

    n = avformat_find_stream_info(pFormatCtx, 0);
    if(n < 0)
    {
        //std::cout << "avformat_find_stream_info failed \n";
        return 1;
    }


    for (int i = 0; i < pFormatCtx->nb_streams; ++i)
    {
        if(AVMEDIA_TYPE_VIDEO == pFormatCtx->streams[i]->codecpar->codec_type)
        {
            nVideoStreamIndex = i;
            break;
        }
    }

    if(-1 == nVideoStreamIndex)
    {
        return 1;
    }

    pCodecCtx = pFormatCtx->streams[nVideoStreamIndex]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if(0 == pCodec)
    {
        return 1;
    }


   
    if (avcodec_open2(pCodecCtx, pCodec,NULL) < 0) {
        fprintf(stderr, "could not open codec");
    }

    // Allocate video frame
    pFrame = av_frame_alloc();
    if(pFrame == NULL)
        return -1;

    pFrameRGB = av_frame_alloc();
    if(pFrameRGB == NULL)
        return -1;

    return 0;
}

int decodeH264(uint8_t *inputbuf, int frame_size, uint8_t *&outBuf, int &outWidth, int &outHeight)
{

    int got_picture;
    int av_result;

    uint8_t *bufferRGB = NULL;
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = inputbuf;
    pkt.size = frame_size;

    if (av_read_frame(pFormatCtx, &pkt) < 0)
    {
        return 0; //这里认为视频读取完了
    }

    if (pkt.stream_index != nVideoStreamIndex)
    {
        return 0;
    }
    av_result = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &pkt); //解码

    if (av_result < 0)
    {
        fprintf(stderr, "decode failed: inputbuf = 0x%x , input_framesize = %d", inputbuf, frame_size);
        return -1;
    }
    av_free_packet(&pkt);

    //前面初始化解码器的时候 并没有设置视频的宽高信息,
    //因为h264的每一帧数据都带有编码的信息,当然也包括这些宽高信息了,因此解码完之后,便可以知道视频的宽高是多少
    //这就是为什么 初始化编码器的时候 需要初始化高度,而初始化解码器却不需要。
    //解码器可以直接从需要解码的数据中获得宽高信息,这样也才会符合道理。
    //所以一开始没有为bufferRGB分配空间 因为没办法知道 视频宽高
    //一旦解码了一帧之后 就可以知道宽高了这时候就可以分配了
    SwsContext *img_convert_ctx;
    if (bufferRGB == NULL)
    {
        int width = pCodecCtx->width;
        int height = pCodecCtx->height;
        int a = pCodecCtx->time_base.den;
        int b = pCodecCtx->time_base.num;
        m_nFrameRate = a / b;
        int numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, width,height);
        bufferRGB = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
        avpicture_fill((AVPicture *)pFrameRGB, bufferRGB, AV_PIX_FMT_RGB32,width, height);

        img_convert_ctx = sws_getContext(width,height,pCodecCtx->pix_fmt,width,height,AV_PIX_FMT_RGB32,SWS_BICUBIC, NULL,NULL,NULL);


    }

    if (got_picture)
    {
        //格式转换 解码之后的数据是yuv420p的 把她转换成 rgb的图像数据
        sws_scale(img_convert_ctx,
            (uint8_t const * const *) pFrame->data,
            pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data,
            pFrameRGB->linesize);

        outBuf = bufferRGB;
        outWidth = pCodecCtx->width;
        outHeight = pCodecCtx->height;

    }

    return got_picture;
}


void QMyImageThread::run()
{
    decoder_Init();
    NALU_t *n;
    int buffersize=100000;
    FILE *myout=stdout;

    h264bitstream=fopen("ds.h264", "rb+");
    if (h264bitstream==NULL)
    {
        printf("Open file error\n");
        return;
    }

    n = (NALU_t*)calloc (1, sizeof (NALU_t));
    if (n == NULL){
        printf("Alloc NALU Error\n");
        return;
    }

    n->max_size=buffersize;
    n->buf = (char*)calloc (buffersize, sizeof (char));
    if (n->buf == NULL)
    {
        free (n);
        printf ("AllocNALU: n->buf");
        return;
    }

    int data_offset=0;
    int nal_num=0;
    printf("-----+-------- NALU Table ------+---------+\n");
    printf(" NUM |    POS    IDC |  TYPE |   LEN   |\n");
    printf("-----+---------+--------+-------+---------+\n");

    int nframeNum = 0;
    while(!feof(h264bitstream))
    {
        int data_lenth;
        data_lenth=GetAnnexbNALU(n);

        uint8_t *bufferRGB;
        int width;
        int height;

        decodeH264((uint8_t*)n->buf, n->len, bufferRGB, width, height);

        //int frameRate = getFrameRate(); //获取帧率

        /// h264裸数据不包含时间戳信息因此只能根据帧率做同步
        /// 需要成功解码一帧后 才能获取到帧率
        /// 为0说明还没获取到 则直接显示
        if (m_nFrameRate != 0)
        {
            msleep(m_nFrameRate);
        }

        //把这个RGB数据 放入QIMage
        QImage image = QImage((uchar *)bufferRGB, width, height, QImage::Format_RGB32);

        //然后传给主线程显示
        emit sigGetOneFrame(image.copy(), ++nframeNum);
    }

    free (n->buf);
    free(n);
}


//
#ifndef QMYIMAGETHREAD_H
#define QMYIMAGETHREAD_H

#include"QThread"
#include "QImage"
// Worker thread that reads ds.h264, decodes each frame to RGB32 and delivers
// it to the GUI thread one QImage at a time.
class QMyImageThread : public QThread
{
    Q_OBJECT

public:
    QMyImageThread(QObject *parent);
    ~QMyImageThread();

private:
signals:
    // Emitted once per decoded frame. img is a deep copy (safe across
    // threads); nFrame is the 1-based frame counter.
    void sigGetOneFrame(QImage img, int nFrame);
private:
    // QThread entry point: the demux + decode loop (see the .cpp).
    void run();
};
#endif // QMYIMAGETHREAD_H



#include "mainwindow.h"
#include "ui_mainwindow.h"
#include "QPainter"
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    m_pMyThread = new QMyImageThread(this);
    connect(m_pMyThread, SIGNAL(sigGetOneFrame(QImage, int)), this, SLOT(GetOneFrameSlot(QImage, int)));
    m_pMyThread->start();
}

// Tear down the window. The decode thread is a QObject child of this window,
// so it is deleted with it; destroying it while run() is still executing is
// undefined behavior (crash on exit).
MainWindow::~MainWindow()
{
    // FIX: wait for the thread before destruction. run() has no cooperative
    // stop flag, so fall back to terminate() if it does not finish in time.
    if (m_pMyThread->isRunning())
    {
        if (!m_pMyThread->wait(3000))
        {
            m_pMyThread->terminate();
            m_pMyThread->wait();
        }
    }
    delete ui;
}

// Receive one decoded frame from the worker thread.
// Stores a deep copy and schedules a repaint; update() leads to paintEvent().
void MainWindow::GetOneFrameSlot(QImage img, int nFrameNum)
{
    Q_UNUSED(nFrameNum);

    m_Image = img.copy(); // detach from the sender's data
    update();             // asynchronous repaint request
}

// Paint the latest decoded frame, scaled to fit and centered on a black
// background.
void MainWindow::paintEvent(QPaintEvent *event)
{
    Q_UNUSED(event);

    QPainter painter(this);
    painter.setBrush(Qt::black);
    painter.drawRect(0, 0, this->width(), this->height()); // clear to black first

    // Nothing decoded yet.
    if (m_Image.size().width() <= 0)
    {
        return;
    }

    // Scale the frame to the window, preserving its aspect ratio.
    QImage img = m_Image.scaled(this->size(), Qt::KeepAspectRatio);

    // Center the scaled image. (FIX: dropped the unused locals n and b.)
    int x = (this->width() - img.width()) / 2;
    int y = (this->height() - img.height()) / 2;

    painter.drawImage(QPoint(x, y), img);
}


#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include "qmyimagethread.h"
#include "QMainWindow"

namespace Ui {
class MainWindow;
}

// Main window: owns the decode thread and paints the latest decoded frame.
class MainWindow : public QMainWindow
{
    Q_OBJECT

public:
    explicit MainWindow(QWidget *parent = 0);
    ~MainWindow();
private slots:
    // Receives one decoded frame from the worker thread and repaints.
    void GetOneFrameSlot(QImage img, int nFrame);

private:
    // Draws m_Image scaled and centered on a black background.
    void paintEvent(QPaintEvent *event);
private:
    Ui::MainWindow *ui;
    QMyImageThread *m_pMyThread;  // decode thread (QObject child of this window)
    QImage m_Image;               // latest decoded frame (deep copy)
};

#endif // MAINWINDOW_H

0

阅读 收藏 喜欢 打印举报/Report
  

新浪BLOG意见反馈留言板 欢迎批评指正

新浪简介 | About Sina | 广告服务 | 联系我们 | 招聘信息 | 网站律师 | SINA English | 产品答疑

新浪公司 版权所有