
Building a Simple Player from Scratch: 4. Decoding Video to YUV Data with FFmpeg, Using avcodec_send_packet and avcodec_receive_frame

Decoding video to YUV data with FFmpeg

Author: Shi Zheng (史正)
Email: [email protected]
If you spot any mistakes, please point them out; apologies for any inconvenience caused by inaccurate descriptions.
Feel free to reach out by email.

csdn : https://blog.csdn.net/shizheng163

github : https://github.com/shizheng163


Overview

A brief outline of the steps involved in video playback:

  • Demuxing: turn the input video into encoded (compressed) data
  • Decoding: turn the compressed data into a color space (YUV, RGB, etc.)
  • Rendering: draw the YUV (or other color-space) data onto the display device to form an image

The following walks through the FFmpeg API calls used to decode video into YUV data, matching the steps above.

FFmpeg version: 4.1
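
Before the full class below, here is a minimal outline of that call sequence. Error handling and cleanup are omitted, and the names url, videoIndex, fmt and ctx are illustrative rather than taken from the project code:

//Minimal outline of the decode path (illustrative, not the project code)
AVFormatContext *fmt = nullptr;
avformat_open_input(&fmt, url, nullptr, nullptr);         //demux: open the input
avformat_find_stream_info(fmt, nullptr);                  //probe stream parameters

AVCodec *codec = avcodec_find_decoder(fmt->streams[videoIndex]->codecpar->codec_id);
AVCodecContext *ctx = avcodec_alloc_context3(codec);
avcodec_parameters_to_context(ctx, fmt->streams[videoIndex]->codecpar);
avcodec_open2(ctx, codec, nullptr);                       //decoder is now ready to accept packets

AVPacket *pkt = av_packet_alloc();
AVFrame *frame = av_frame_alloc();
while (av_read_frame(fmt, pkt) >= 0)                      //demux: read one compressed packet
{
    if (pkt->stream_index == videoIndex)
    {
        avcodec_send_packet(ctx, pkt);                    //feed the decoder
        while (avcodec_receive_frame(ctx, frame) == 0)    //drain every frame it produced
        {
            //frame->data now holds the decoded picture (e.g. YUV420P for typical H.264)
        }
    }
    av_packet_unref(pkt);
}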

As mentioned in the previous article, the decoded image data is already YUV420. If YUV420 is all you need, no image conversion is required, and the color-space conversion context does not need to be initialized.
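
A minimal sketch of that check, reusing the illustrative fmt and videoIndex names from the outline above; a SwsContext is only created when the decoded format is not already AV_PIX_FMT_YUV420P:

//Skip color-space conversion when the decoder already outputs YUV420P (illustrative)
AVCodecParameters *par = fmt->streams[videoIndex]->codecpar;
SwsContext *sws = nullptr;
if ((AVPixelFormat)par->format != AV_PIX_FMT_YUV420P)
{
    sws = sws_getContext(par->width, par->height, (AVPixelFormat)par->format,
                         par->width, par->height, AV_PIX_FMT_YUV420P,
                         SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
}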

In addition, to compare the player's playback speed, the following command was used to overlay a timestamp watermark on the video:

  • ffmpeg -i ./Suger.mp4 -vf "drawtext=expansion=strftime: basetime=$(date +%s -d '2019-01-12 00:00:00')000000 :text='%Y-%m-%d %H\\:%M\\:%S':fontsize=30:fontcolor=white:box=1:x=10:y=10:[email protected]:" -strict -2 -y "SugerTime.mp4"

2019-01-12 00:00:00 is the starting timestamp.

Error Handling

  • "bad dst image pointers" appears during processing

    This happens because av_image_alloc was not called to allocate the destination image buffers.

  • "Invalid data found when processing input" appears during decoding

    This happens because avcodec_parameters_to_context and avcodec_open2 were not called (see the sketch below).
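
For reference, a minimal sketch of the calls that avoid both errors; the names ctx, codec and stream are illustrative:

//Copy the stream parameters into the codec context, then open the decoder;
//skipping either step leads to "Invalid data found when processing input".
avcodec_parameters_to_context(ctx, stream->codecpar);
avcodec_open2(ctx, codec, nullptr);

//Allocate the destination image before calling sws_scale;
//without this, swscale reports "bad dst image pointers".
uint8_t *data[4];
int linesize[4];
av_image_alloc(data, linesize, stream->codecpar->width, stream->codecpar->height,
               AV_PIX_FMT_YUV420P, 1);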

Code Implementation

If the URL to open is a network stream, call avformat_network_init() first to initialize the network environment.
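
A minimal sketch, assuming a simple prefix check on the URL (the check itself is illustrative and not part of the project code):

//Initialize FFmpeg's network stack once before opening network streams
if (url.rfind("rtsp://", 0) == 0 || url.rfind("rtmp://", 0) == 0 || url.rfind("http", 0) == 0)
    avformat_network_init();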

The complete code is available at https://github.com/shizheng163/MyMediaPlayer/tree/v0.3.0
.h file

/*
 * copyright (c) 2018-2019 shizheng. All Rights Reserved.
 * Please retain author information while you reference code
 * date:   2019-01-13
 * author: shizheng
 * email:  [email protected]
 */
#ifndef FFDECODER_H
#define FFDECODER_H
#include <string>
#include <fileutil.h>
#include <functional>
#include <mutex>
#include <thread>
struct AVFormatContext;
struct AVFrame;
struct AVCodecContext;
namespace ffmpegutil {
typedef fileutil::PictureFilePtr YuvDataPtr;
class FFDecoder
{
public:
    typedef std::function<void (YuvDataPtr pYuvData) > ProcessYuvDataCallback;
    typedef std::function<void (bool bIsOccurErr) > DecodeThreadExitCallback;
    FFDecoder();
    ~FFDecoder();
    bool InitializeDecoder(std::string url);
    bool StartDecodeThread();
    void StopDecodeThread();
    /**
     * @brief Set the callback for decoded (YUV) data
     */
    void SetProcessDataCallback(ProcessYuvDataCallback callback);
    /**
     * @brief Set the callback invoked when the decode thread exits
     */
    void SetDecodeThreadExitCallback(DecodeThreadExitCallback callback);
    /**
     * @brief Get the error description
     * @note When an error occurs, the reason is recorded here
     */
    std::string ErrName() const { return m_szErrName;}
    /**
     * @brief Get the video frame rate
     */
    float GetVideoFrameRate();
private:

    void decodeInThread();

    std::string                 m_szUrl;
    AVFormatContext             *m_pInputFormatContext;
    int                         m_nVideoStreamIndex;
    AVCodecContext              *m_pCodecContext;

    //Callback for handling decoded data
    std::mutex                  m_mutexForFnProcessYuvData;
    ProcessYuvDataCallback      m_fnProcssYuvData;

    std::thread                 m_threadForDecode;
    bool                        m_bIsRunDecodeThread;

    std::string                 m_szErrName;

    //Callback for handling decode thread exit
    std::mutex                  m_mutexForFnThreadExit;
    DecodeThreadExitCallback    m_fnThreadExit;
};
}//namespace ffmpegutil

#endif // FFDECODER_H

.cpp file

/*
 * copyright (c) 2018-2019 shizheng. All Rights Reserved.
 * Please retain author information while you reference code
 * date:   2019-01-13
 * author: shizheng
 * email:  [email protected]
 */
#include "ffdecoder.h"
#include <memory>
extern "C"
{
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
#include "logutil.h"
#include "ffmpegutil.h"


using namespace ffmpegutil;
using namespace std;
using namespace logutil;

typedef std::shared_ptr<AVPacket> AVPacketPtr;
typedef std::shared_ptr<AVFrame> AVFramePtr;

FFDecoder::FFDecoder()
    :m_pInputFormatContext(NULL)
    ,m_nVideoStreamIndex(-1)
    ,m_pCodecContext(NULL)
{
}

FFDecoder::~FFDecoder()
{
    if(m_threadForDecode.joinable())
        m_threadForDecode.join();

    if(m_pInputFormatContext)
    {
        avformat_close_input(&m_pInputFormatContext);
        avformat_free_context(m_pInputFormatContext);
        m_pInputFormatContext = NULL;
    }
    if(m_pCodecContext)
    {
        avcodec_free_context(&m_pCodecContext);
        m_pCodecContext = NULL;
    }
}

bool FFDecoder::InitializeDecoder(string url)
{
    m_szUrl = url;
    int ret = 0;
    ret = avformat_open_input(&m_pInputFormatContext, m_szUrl.c_str(), NULL, NULL);
    //GetStrError is a wrapper around av_strerror.
    if(ret < 0)
    {
        m_szErrName = MySprintf("FFDecoder open input failed, url = %s, err: %s", m_szUrl.c_str(), GetStrError(ret).c_str());
        return false;
    }
    for(unsigned i = 0; i < m_pInputFormatContext->nb_streams; i++)
    {
        if(m_pInputFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            m_nVideoStreamIndex = i;
            break;
        }
    }

    if( m_nVideoStreamIndex == -1)
    {
        m_szErrName = MySprintf("FFDecoder could find video stream, url = %s", m_szUrl.c_str());
        return false;
    }

    ret = avformat_find_stream_info(m_pInputFormatContext, NULL);
    if(ret < 0)
    {
        m_szErrName = MySprintf("FFDecoder find stream info failed, url = %s, err: %s", m_szUrl.c_str(), GetStrError(ret).c_str());
        return false;
    }

//    av_dump_format(m_pInputFormatContext, m_nVideoStreamIndex, NULL, 0);

    //InitAVDecoder
    AVCodec * pCodec = avcodec_find_decoder(m_pInputFormatContext->streams[m_nVideoStreamIndex]->codecpar->codec_id);
    if(!pCodec)
    {
        m_szErrName = MySprintf("FFDecoder find AVCodec failed, url = %s, codec: %s", m_szUrl.c_str(), avcodec_get_name(m_pInputFormatContext->streams[m_nVideoStreamIndex]->codecpar->codec_id));
        return false;
    }
    m_pCodecContext = avcodec_alloc_context3(pCodec);

    //Copy the decoder parameters from the stream into the codec context
    ret = avcodec_parameters_to_context(m_pCodecContext, m_pInputFormatContext->streams[m_nVideoStreamIndex]->codecpar);

    if(ret < 0)
    {
        m_szErrName = MySprintf("FFDecoder avcodec_parameters_to_context failed, url = %s, err = %s", m_szUrl.c_str(), avcodec_get_name(m_pInputFormatContext->streams[m_nVideoStreamIndex]->codecpar->codec_id), GetStrError(ret).c_str());
        return false;
    }

    ret = avcodec_open2(m_pCodecContext, pCodec, NULL);
    if(ret < 0)
    {
        m_szErrName = MySprintf("FFDecoder AVCodec Open  failed, url = %s, codec: %s, err = %s", m_szUrl.c_str(), avcodec_get_name(m_pInputFormatContext->streams[m_nVideoStreamIndex]->codecpar->codec_id), GetStrError(ret).c_str());
        return false;
    }
    return true;
}

bool FFDecoder::StartDecodeThread()
{
    if(!m_pCodecContext)
    {
        m_szErrName = "decode context not init!";
        return false;
    }
    m_bIsRunDecodeThread = true;
    m_threadForDecode = std::thread(&FFDecoder::decodeInThread, this);
    return true;
}

void FFDecoder::SetProcessDataCallback(FFDecoder::ProcessYuvDataCallback callback)
{
    std::unique_lock<mutex> locker(m_mutexForFnProcessYuvData);
    m_fnProcssYuvData = callback;
}

void FFDecoder::SetDecodeThreadExitCallback(FFDecoder::DecodeThreadExitCallback callback)
{
    std::unique_lock<mutex> locker(m_mutexForFnThreadExit);
    m_fnThreadExit = callback;
}

void FFDecoder::StopDecodeThread()
{
    m_bIsRunDecodeThread = false;
}

float FFDecoder::GetVideoFrameRate()
{
    if(m_pInputFormatContext)
    {
        return (float)m_pInputFormatContext->streams[m_nVideoStreamIndex]->avg_frame_rate.num / m_pInputFormatContext->streams[m_nVideoStreamIndex]->avg_frame_rate.den ;
    }
    return 0;
}

void FFDecoder::decodeInThread()
{
    bool isEof = false;

    AVFramePtr pFrameScale(av_frame_alloc(), [](AVFrame * ptr){
        //Free the buffer allocated by av_image_alloc before freeing the frame itself
        av_freep(&ptr->data[0]);
        av_frame_free(&ptr);
    });

    int ret = 0;

    //Initialize the color-space converter (SwsContext)
    AVStream * pVideoStream = m_pInputFormatContext->streams[m_nVideoStreamIndex];
    SwsContext * pswsContext = sws_getContext(pVideoStream->codecpar->width, pVideoStream->codecpar->height, (AVPixelFormat)pVideoStream->codecpar->format,
                                              pVideoStream->codecpar->width, pVideoStream->codecpar->height, AV_PIX_FMT_YUV420P,
                                              SWS_FAST_BILINEAR, 0, 0, 0);
    //Allocate memory to hold the scaled image
    av_image_alloc(pFrameScale->data, pFrameScale->linesize, pVideoStream->codecpar->width, pVideoStream->codecpar->height, AV_PIX_FMT_YUV420P, 1);

    while(m_bIsRunDecodeThread)
    {
        AVPacketPtr ptrAVPacket(av_packet_alloc(), [](AVPacket * ptr){
            av_packet_free(&ptr);
        });
        av_init_packet(ptrAVPacket.get());
        ret = av_read_frame(m_pInputFormatContext, ptrAVPacket.get());
        if(ret < 0)
        {
            string szErrName = GetStrError(ret).c_str();
            if(szErrName != "End of file")
                m_szErrName = MySprintf("FFMpeg Read Frame Failed, Err = %s", szErrName.c_str());
            else
            {
                MyLog(info, "FFMpeg Read Frame Complete!");
                isEof = true;
            }
            break;
        }
        if(ptrAVPacket->stream_index == m_nVideoStreamIndex)
        {
            ret = avcodec_send_packet(m_pCodecContext, ptrAVPacket.get());
            AVFramePtr pTempFrame(av_frame_alloc(), [](AVFrame * ptr){
                av_frame_free(&ptr);
            });
            while(1)
            {
                ret = avcodec_receive_frame(m_pCodecContext, pTempFrame.get());
                if(ret != 0)
                    break;
                ret= sws_scale(pswsContext, (const uint8_t* const*)pTempFrame->data, pTempFrame->linesize, 0, pTempFrame->height,
                               (uint8_t* const*)pFrameScale->data, pFrameScale->linesize);
                if(ret > 0)
                {
                    fileutil::FileRawData data;
                    data.AppendData(pFrameScale->data[0], pVideoStream->codecpar->width * pVideoStream->codecpar->height);
                    data.AppendData(pFrameScale->data[1], pVideoStream->codecpar->width * pVideoStream->codecpar->height / 4);
                    data.AppendData(pFrameScale->data[2], pVideoStream->codecpar->width * pVideoStream->codecpar->height / 4);
                    YuvDataPtr pYuvData(new fileutil::PictureFile(data, pVideoStream->codecpar->width, pVideoStream->codecpar->height, fileutil::PictureFile::kFormatYuv));
                    pYuvData->m_filename = to_string(pVideoStream->codec_info_nb_frames);
                    std::unique_lock<mutex> locker(m_mutexForFnProcessYuvData);
                    if(m_fnProcssYuvData)
                        m_fnProcssYuvData(pYuvData);
                }
            }
        }
    }
    //Release the color-space conversion context created above
    sws_freeContext(pswsContext);
    MyLog(m_bIsRunDecodeThread && !isEof ? err : info, "FFDecoder %s exit!\n", m_bIsRunDecodeThread && !isEof ? "abnormal" : "normal");
    std::unique_lock<mutex> locker(m_mutexForFnThreadExit);
    if(m_fnThreadExit)
        m_fnThreadExit(m_bIsRunDecodeThread && !isEof);
}
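
To round things off, here is an illustrative usage sketch of the FFDecoder class; the main function, the callback bodies and the file name are assumptions for the example, not part of the project code:

#include "ffdecoder.h"
#include <iostream>

int main()
{
    ffmpegutil::FFDecoder decoder;
    if(!decoder.InitializeDecoder("./SugerTime.mp4"))
    {
        std::cerr << decoder.ErrName() << std::endl;
        return 1;
    }

    //Hand each decoded YUV frame to the render side (here we simply count frames)
    int frameCount = 0;
    decoder.SetProcessDataCallback([&frameCount](ffmpegutil::YuvDataPtr){
        ++frameCount;
    });
    decoder.SetDecodeThreadExitCallback([](bool bIsOccurErr){
        std::cout << "decode thread exit, error = " << std::boolalpha << bIsOccurErr << std::endl;
    });

    std::cout << "frame rate: " << decoder.GetVideoFrameRate() << std::endl;
    decoder.StartDecodeThread();

    //... wait until playback finishes, then:
    decoder.StopDecodeThread();
    return 0;
}

Because the destructor joins the decode thread, it is safe to let the decoder go out of scope right after StopDecodeThread.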
