
FFmpeg Practical Tutorial (7): Decoding AVI on Android with CMake and Rendering to a SurfaceView

The native code below does all of the work: it opens the AVI file with libavformat, decodes each video frame with libavcodec, converts the frame to RGBA with libswscale, and copies it line by line into the SurfaceView's ANativeWindow buffer.

#include <jni.h>
#include <string.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}

#define LOG_TAG "ffmpegandroidplayer"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)

extern "C"
JNIEXPORT jint JNICALL
Java_com_ws_ffmpegandroidplayer_MainActivity_play(JNIEnv *env, jclass clazz, jobject surface) {
    LOGD("play");

    // Path of the video file on the SD card; change it as needed or pass it in through JNI
    //char *file_name = "/storage/emulated/0/ws2.mp4";
    const char *file_name = "/storage/emulated/0/video.avi";

    av_register_all();

    AVFormatContext *pFormatCtx = avformat_alloc_context();

    // Open video file
    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
        LOGD("Couldn't open file:%s\n", file_name);
        return -1; // Couldn't open file
    }

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGD("Couldn't find stream information.");
        return -1;
    }

    // Find the first video stream
    int videoStream = -1, i;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
            && videoStream < 0) {
            videoStream = i;
        }
    }
    if (videoStream == -1) {
        LOGD("Didn't find a video stream.");
        return -1; // Didn't find a video stream
    }

    // Get a pointer to the codec context for the video stream
    AVCodecContext *pCodecCtx = pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGD("Codec not found.");
        return -1; // Codec not found
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGD("Could not open codec.");
        return -1; // Could not open codec
    }

    // Get the native window from the Java Surface
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);

    // Video dimensions
    int videoWidth = pCodecCtx->width;
    int videoHeight = pCodecCtx->height;

    // Set the native window's buffer size; the window scales the content automatically
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight,
                                     WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer windowBuffer;

    // Allocate video frame
    AVFrame *pFrame = av_frame_alloc();
    // Frame used for rendering
    AVFrame *pFrameRGBA = av_frame_alloc();
    if (pFrameRGBA == NULL || pFrame == NULL) {
        LOGD("Could not allocate video frame.");
        return -1;
    }

    // Determine required buffer size and allocate buffer
    // The buffer holds the data that is actually rendered, in RGBA format
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width,
                                            pCodecCtx->height, 1);
    uint8_t *buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
                         pCodecCtx->width, pCodecCtx->height, 1);

    // Decoded frames are not in RGBA, so they must be converted before rendering
    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
                                                pCodecCtx->height,
                                                pCodecCtx->pix_fmt,
                                                pCodecCtx->width,
                                                pCodecCtx->height,
                                                AV_PIX_FMT_RGBA,
                                                SWS_BILINEAR,
                                                NULL,
                                                NULL,
                                                NULL);

    int frameFinished;
    AVPacket packet;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            // A single decode call does not necessarily produce a complete frame
            if (frameFinished) {
                // Lock the native window buffer
                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);

                // Convert the frame to RGBA
                sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);

                // The window stride and the frame stride differ, so copy line by line
                uint8_t *dst = (uint8_t *) windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4;
                uint8_t *src = pFrameRGBA->data[0];
                int srcStride = pFrameRGBA->linesize[0];
                int h;
                for (h = 0; h < videoHeight; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
                }

                ANativeWindow_unlockAndPost(nativeWindow);
            }
        }
        av_packet_unref(&packet);
    }

    // Release resources
    sws_freeContext(sws_ctx);
    av_free(buffer);
    av_frame_free(&pFrameRGBA);
    // Free the YUV frame
    av_frame_free(&pFrame);
    // Close the codec
    avcodec_close(pCodecCtx);
    // Close the video file
    avformat_close_input(&pFormatCtx);
    ANativeWindow_release(nativeWindow);

    return 0;
}
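One thing to be aware of: the render loop above posts frames to the window as fast as they can be decoded, so playback runs at CPU speed rather than at the video's frame rate. A crude but effective fix is to sleep for one frame interval after each ANativeWindow_unlockAndPost(). The minimal sketch below derives that interval from the stream's average frame rate; the usleep call and the 25 fps fallback are my additions, and a real player would schedule by each frame's pts instead.

#include <unistd.h>  // usleep

// Sketch: rough frame pacing based on the stream's average frame rate.
// Compute this once after videoStream has been found...
AVRational fps = pFormatCtx->streams[videoStream]->avg_frame_rate;
int64_t frame_delay_us = 40000;                       // fall back to 25 fps
if (fps.num > 0 && fps.den > 0) {
    frame_delay_us = (int64_t) (1000000.0 / av_q2d(fps));
}

// ...then, inside the decode loop, right after ANativeWindow_unlockAndPost():
usleep((useconds_t) frame_delay_us);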
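Also note that the player above uses the older FFmpeg 3.x decoding API (AVStream->codec and avcodec_decode_video2()), which is deprecated and has been removed from current FFmpeg releases. If you build against FFmpeg 3.1 or newer, the open-and-decode steps look roughly like the sketch below; this is a minimal outline under that assumption, not a drop-in replacement, and the sws_scale()/ANativeWindow code from the loop above stays unchanged.

// Sketch: the send/receive decoding API (FFmpeg >= 3.1), minimal error handling.
AVCodecParameters *par = pFormatCtx->streams[videoStream]->codecpar;
const AVCodec *codec = avcodec_find_decoder(par->codec_id);
AVCodecContext *codecCtx = avcodec_alloc_context3(codec);
avcodec_parameters_to_context(codecCtx, par);   // copies width, height, pix_fmt, extradata
avcodec_open2(codecCtx, codec, NULL);

AVFrame *frame = av_frame_alloc();
AVPacket *pkt = av_packet_alloc();
while (av_read_frame(pFormatCtx, pkt) >= 0) {
    if (pkt->stream_index == videoStream &&
        avcodec_send_packet(codecCtx, pkt) == 0) {
        // One packet can produce zero, one, or several frames.
        while (avcodec_receive_frame(codecCtx, frame) == 0) {
            // frame is ready here: convert it with sws_scale() and copy it
            // into the ANativeWindow buffer exactly as in the loop above.
        }
    }
    av_packet_unref(pkt);
}
av_packet_free(&pkt);
av_frame_free(&frame);
avcodec_free_context(&codecCtx);

With codecpar you no longer touch AVStream->codec at all, and from FFmpeg 4.0 onward the av_register_all() call is unnecessary as well.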
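Finally, the early return -1 paths in the function above exit without closing pFormatCtx or releasing anything opened later. A common way to avoid such leaks while keeping the happy path readable is a single cleanup block at the end of the function; the sketch below shows the pattern (the end label and ret variable are illustrative, not part of the original code).

// Sketch: centralising cleanup so every exit path releases what was opened.
int ret = 0;
AVFormatContext *fmt = avformat_alloc_context();

if (avformat_open_input(&fmt, file_name, NULL, NULL) != 0) {
    ret = -1;
    goto end;
}
if (avformat_find_stream_info(fmt, NULL) < 0) {
    ret = -1;
    goto end;
}
// ...open the decoder, run the render loop...

end:
    avformat_close_input(&fmt);   // safe even if avformat_open_input() already failed
    return ret;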