Capturing Camera data, encoding it to H.264, and pushing it to an RTMP server with FFmpeg on Android

Audio/Video Hands-on Practice

Overview

Pushing and pulling streams are the two core functions of live streaming. A previous post covered pushing a local MP4 file to a streaming server; this one walks through the full path from capturing camera data, to encoding it as H.264, to pushing it out over RTMP. To keep things simple, audio encoding is not covered here.

Environment

OS: Ubuntu 16.04
NDK version: android-ndk-r16b
FFmpeg version: ffmpeg-3.3.8

Flow Analysis

Getting Started

Camera data on Android is NV21 by default, and frames from the front camera need to be rotated 270 degrees. StartPublish performs the initialization work and sets transform = true; only then are camera frames handed to EncodeBuffer, which encodes them to H.264 and pushes them out.

void H264Publisher::StartPublish() {

    //1. register all components
    av_register_all();
    //2. initialize networking
    avformat_network_init();

    //3. allocate the output AVFormatContext (FLV over RTMP)
    avformat_alloc_output_context2(&out_fmt, NULL, "flv", outputPath);
    //4. find the H.264 encoder
    pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!pCodec) {
        LOGE("avcodec not found!");
        return;
    }
    //5. allocate the encoder context and set its parameters
    pCodecCtx = avcodec_alloc_context3(pCodec);
    //codec id, here the H.264 encoder
    pCodecCtx->codec_id = pCodec->id;
    //the type of data this encoder consumes
    pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    //pixel format
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    //the front camera is rotated 270 degrees, so width and height swap
    pCodecCtx->width = height;
    pCodecCtx->height = width;
    pCodecCtx->framerate = (AVRational) {fps, 1};
    pCodecCtx->time_base = (AVRational) {1, fps};
    pCodecCtx->gop_size = 50;       //one keyframe every 50 frames
    pCodecCtx->max_b_frames = 0;    //no B-frames, keeps latency down
    pCodecCtx->qmin = 10;
    pCodecCtx->qmax = 50;
    pCodecCtx->bit_rate = 100 * 1024 * 8;   //~800 kbit/s
    pCodecCtx->level = 41;
    pCodecCtx->refs = 1;
    pCodecCtx->qcompress = 0.6;

    if (out_fmt->oformat->flags & AVFMT_GLOBALHEADER) {
        pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }

    //x264 options: fastest preset, tuned for low latency
    AVDictionary *opts = NULL;
    if (pCodecCtx->codec_id == AV_CODEC_ID_H264) {
        av_dict_set(&opts, "preset", "superfast", 0);
        av_dict_set(&opts, "tune", "zerolatency", 0);
    }
    //6. open the encoder
    int result = avcodec_open2(pCodecCtx, pCodec, &opts);
    if (result < 0) {
        LOGE("open encoder failed %d", result);
        return;
    }

    //7. create the output stream
    pStream = avformat_new_stream(out_fmt, pCodec);
    if (!pStream) {
        LOGE("Failed allocating output stream");
        return;
    }
    pStream->time_base.num = 1;
    pStream->time_base.den = fps;
    pStream->codecpar->codec_tag = 0;
    if (avcodec_parameters_from_context(pStream->codecpar, pCodecCtx) < 0) {
        LOGE("Failed av codec parameters_from_context");
        return;
    }

    //8. open the network output (write-only is sufficient for pushing)
    if (avio_open(&out_fmt->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
        LOGE("Failed to open output file!\n");
        return;
    }
    //9. write the header
    result = avformat_write_header(out_fmt, NULL);
    if (result < 0) {
        LOGE("Error occurred when opening output URL %d", result);
        return;
    }

    //allocate the reusable frame and its pixel buffer
    pFrame = av_frame_alloc();
    pFrame->width = pCodecCtx->width;
    pFrame->height = pCodecCtx->height;
    pFrame->format = pCodecCtx->pix_fmt;
    int bufferSize = av_image_get_buffer_size(pCodecCtx->pix_fmt, pCodecCtx->width,
                                              pCodecCtx->height, 1);
    pFrameBuffer = (uint8_t *) av_malloc(bufferSize);
    av_image_fill_arrays(pFrame->data, pFrame->linesize, pFrameBuffer, pCodecCtx->pix_fmt,
                         pCodecCtx->width, pCodecCtx->height, 1);

    //allocate the packet that will hold encoded frames
    av_new_packet(&avPacket, bufferSize * 3);

    //mark the pipeline as running
    this->transform = true;
}
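Before moving on, it helps to see the state these methods share. Below is a header sketch, assuming the member names used in the snippets; the types, defaults, and the IsTransforming accessor are inferred by me, not copied from the original project.

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
}

class H264Publisher {
public:
    void StartPublish();
    void EncodeBuffer(unsigned char *nv21Buffer);
    void StopPublish();
    bool IsTransforming() const { return transform; } //hypothetical accessor

private:
    int EncodeFrame(AVCodecContext *pCodecCtx, AVFrame *pFrame, AVPacket *avPacket);

    const char *outputPath;          //rtmp:// push URL
    int width, height, fps;          //capture size and frame rate
    int index = 0;                   //running frame counter, drives the PTS
    volatile bool transform = false; //gates EncodeBuffer

    AVFormatContext *out_fmt = NULL;
    AVCodec *pCodec = NULL;
    AVCodecContext *pCodecCtx = NULL;
    AVStream *pStream = NULL;
    AVFrame *pFrame = NULL;
    uint8_t *pFrameBuffer = NULL;
    AVPacket avPacket;
};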

Next comes the core encoding function:

int H264Publisher::EncodeFrame(AVCodecContext *pCodecCtx, AVFrame *pFrame, AVPacket *avPacket) {
    //the AVFrame PTS must be set before the frame is sent to the encoder
    //(pFrame is NULL when flushing, so guard the dereference)
    if (pFrame != NULL) {
        pFrame->pts = index;
    }
    int ret = avcodec_send_frame(pCodecCtx, pFrame);
    if (ret < 0) {
        //failed to send the frame for encoding
        return -1;
    }
    while (ret >= 0) {
        ret = avcodec_receive_packet(pCodecCtx, avPacket);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            return 0;
        } else if (ret < 0) {
            //error during encoding
            return -1;
        }

        //rescale timestamps from 1/fps units into the stream time_base
        avPacket->stream_index = pStream->index;
        AVRational time_base = out_fmt->streams[0]->time_base; //typically {1, 1000} for FLV
        avPacket->pts = index * (pStream->time_base.den) / ((pStream->time_base.num) * fps);
        avPacket->dts = avPacket->pts;
        avPacket->duration = (pStream->time_base.den) / ((pStream->time_base.num) * fps);
        LOGI("Send frame index:%d,pts:%lld,dts:%lld,duration:%lld,time_base:%d,%d",
             index,
             (long long) avPacket->pts,
             (long long) avPacket->dts,
             (long long) avPacket->duration,
             time_base.num, time_base.den);
        avPacket->pos = -1;
        //mux and send the packet
        ret = av_interleaved_write_frame(out_fmt, avPacket);
        if (ret != 0) {
            LOGE("av_interleaved_write_frame failed");
        }
        av_packet_unref(avPacket);

        index++;
    }
    return 0;
}
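The hand-rolled timestamp math above works because we requested a stream time_base of {1, fps} and the FLV muxer remaps it to {1, 1000}: with fps = 25, frame i gets pts = i * 1000 / 25 = 40i ms. A more general way to express the same conversion is av_rescale_q; the snippet below is my suggestion, not code from the original post.

//equivalent timestamp computation with av_rescale_q: convert the frame
//index from 1/fps units into whatever time_base the muxer settled on
AVRational src = (AVRational) {1, fps};
avPacket->pts = av_rescale_q(index, src, pStream->time_base);
avPacket->dts = avPacket->pts;
avPacket->duration = av_rescale_q(1, src, pStream->time_base);

EncodeBuffer below prepares the frame that feeds EncodeFrame: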

void H264Publisher::EncodeBuffer(unsigned char *nv21Buffer) {

    uint8_t *i420_y = pFrameBuffer;
    uint8_t *i420_u = pFrameBuffer + width * height;
    uint8_t *i420_v = pFrameBuffer + width * height * 5 / 4;

    //convert NV21 to I420 and rotate 270 degrees in one pass; after rotation
    //the output frame is height wide and width tall, which is why the
    //destination strides are height and height / 2; the NV21 sample size
    //is the full buffer, width * height * 3 / 2 bytes
    libyuv::ConvertToI420(nv21Buffer, width * height * 3 / 2, i420_y, height, i420_u,
                          height / 2, i420_v, height / 2, 0, 0, width, height, width,
                          height, libyuv::kRotate270, libyuv::FOURCC_NV21);

    pFrame->data[0] = i420_y;
    pFrame->data[1] = i420_u;
    pFrame->data[2] = i420_v;

    //encode to H.264 and push
    EncodeFrame(pCodecCtx, pFrame, &avPacket);
}
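EncodeBuffer is driven from the Java side: the camera preview callback hands each NV21 frame down through JNI. The glue below is a hypothetical sketch; the JNI function name, the global publisher pointer, and the IsTransforming check are my assumptions, not names from the original project.

#include <jni.h>

static H264Publisher *publisher = NULL; //created when streaming starts (assumed)

extern "C" JNIEXPORT void JNICALL
Java_com_onzhou_live_H264Publisher_nativePushFrame(JNIEnv *env, jobject /*thiz*/,
                                                   jbyteArray frame) {
    //drop frames until StartPublish has set transform, or after StopPublish
    if (publisher == NULL || !publisher->IsTransforming()) {
        return;
    }
    jbyte *data = env->GetByteArrayElements(frame, NULL);
    publisher->EncodeBuffer((unsigned char *) data);
    //JNI_ABORT: the native side never modifies the frame, no copy-back needed
    env->ReleaseByteArrayElements(frame, data, JNI_ABORT);
}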

When we stop streaming, transform is set to false, the last buffered frames are drained from the encoder, and finally the handles are closed and resources released.

void H264Publisher::StopPublish() {
    //mark the pipeline as stopped
    this->transform = false;

    //passing a NULL frame flushes whatever the encoder still holds
    int result = EncodeFrame(pCodecCtx, NULL, &avPacket);
    if (result >= 0) {
        //write the file trailer
        av_write_trailer(out_fmt);
        //release resources
        if (pCodecCtx != NULL) {
            avcodec_close(pCodecCtx);
            avcodec_free_context(&pCodecCtx);
            pCodecCtx = NULL;
        }
        if (pFrame != NULL) {
            av_frame_free(&pFrame);
            pFrame = NULL;
        }
        if (pFrameBuffer != NULL) {
            av_free(pFrameBuffer);
            pFrameBuffer = NULL;
        }
        if (out_fmt != NULL) {
            avio_close(out_fmt->pb);
            avformat_free_context(out_fmt);
            out_fmt = NULL;
        }
    }
}
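The EncodeFrame(pCodecCtx, NULL, &avPacket) call above relies on FFmpeg's draining contract: a NULL frame puts the encoder into flush mode, and packets are then read until AVERROR_EOF. Spelled out as a standalone sketch:

//drain the encoder: NULL enters flush mode, AVERROR_EOF means it is empty
avcodec_send_frame(pCodecCtx, NULL);
while (true) {
    int ret = avcodec_receive_packet(pCodecCtx, &avPacket);
    if (ret == AVERROR_EOF || ret < 0) {
        break; //fully drained (or a real error)
    }
    av_interleaved_write_frame(out_fmt, &avPacket);
    av_packet_unref(&avPacket);
}

With zerolatency and no B-frames there is usually little left to drain, but the contract still applies.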

Note: as mentioned earlier, Android cameras deliver NV21 by default, which the encoder cannot consume directly. Each frame must first be converted to I420 (YUV420P) and rotated 270 degrees (90 degrees for the rear camera); the result is then fed to the encoder to produce H.264 data, which is finally pushed to the server via av_interleaved_write_frame.
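To make the conversion concrete, here is what the repacking boils down to when rotation is ignored. This is an illustrative sketch only; the real code should keep using libyuv, which does the same thing with SIMD and handles the rotation in the same pass.

#include <cstring>
#include <cstdint>

//NV21: Y plane (w*h bytes) followed by one interleaved plane of V,U pairs.
//I420: Y plane, then a U plane (w*h/4), then a V plane (w*h/4).
static void Nv21ToI420(const uint8_t *nv21, uint8_t *i420, int w, int h) {
    const int ySize = w * h;
    const int uvCount = ySize / 4;
    memcpy(i420, nv21, ySize);            //Y copies straight through
    uint8_t *u = i420 + ySize;
    uint8_t *v = u + uvCount;
    const uint8_t *vu = nv21 + ySize;     //interleaved chroma, V first
    for (int i = 0; i < uvCount; i++) {
        v[i] = vu[2 * i];
        u[i] = vu[2 * i + 1];
    }
}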

Play our LAN live stream rtmp://192.168.1.102:1935/onzhou/live with ffplay:

ffplay rtmp://192.168.1.102:1935/onzhou/live

Known Limitations

Only the video encoding and streaming path is handled here; audio encoding was left out to make the streaming flow easier to follow, and will be covered in later posts.

Project source: ffmpeg-camera-stream
https://github.com/byhook/ffmpeg4android
