android平臺,利用ffmpeg對android攝像頭採集編碼

android平臺,利用ffmpeg對android攝像頭採集編碼

對於這個問題,我也是折騰了好幾天終於把這個問題解決了。不多說,進入主題:

首先是demo下載地址:http://download.csdn.net/detail/zh_ang_hua/8971915;
這個下載地址demo有bug,已修改,新的現在地址:http://download.csdn.net/detail/zh_ang_hua/8986541 點選開啟連結

開始啦:

我假定你已經編譯好了ffmpeg庫,只需要自己編寫JNI封裝介面即可。

java上層程式碼:
package com.hua.cameraandroidtest;
import java.io.File;
import java.util.Calendar;


import com.hua.cameraandroidtest.R;


import android.annotation.SuppressLint;
import android.app.Activity;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.RelativeLayout;


/**
 * Camera preview activity that forwards each preview frame to a native
 * ffmpeg MPEG-4 encoder via three JNI entry points
 * ({@link #videoinit(byte[])}, {@link #videostart(byte[])},
 * {@link #videoclose()}).
 *
 * Flow: Start button -> videoinit(path) -> every onPreviewFrame posts the
 * YUV buffer to the handler -> videostart(yuv) encodes it; Stop button (or
 * surface teardown) -> videoclose().
 */
public class MainActivity extends Activity implements Callback, PictureCallback {

    /** Live camera preview surface. */
    SurfaceView sView;
    SurfaceHolder surfaceHolder;
    /** Overlay holding the start/stop buttons; auto-hidden after ~7 s idle. */
    RelativeLayout mButtonsLayout;
    RelativeLayout mMainLayout;
    Button mStartButton, mStopButton;
    ButtonsHandler mHandler;
    Camera camera;
    // Wall-clock time (ms) when the overlay was last shown.
    // FIX: was declared double; System.currentTimeMillis() returns long.
    long mVisibityTime;
    boolean mIsVisibity;
    // True while preview frames are being forwarded to the native encoder.
    boolean mIsStartPre;
    final int MSG_CHECK_PROESS = 10001;// "msg_check_proess";
    final int MSG_CHECK_TOUCH = 10002;// "msg_check_touch";
    final int MSG_WRITE_YUVDATA = 10003;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Full-screen, translucent window so the preview fills the display.
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        this.requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.mainlayout);

        sView = (SurfaceView) this.findViewById(R.id.surfaceid);
        sView.setOnTouchListener(new OnTouchListener() {
            @Override
            public boolean onTouch(View arg0, MotionEvent arg1) {
                // Any tap re-shows the button overlay and triggers a refocus.
                mHandler.sendEmptyMessage(MSG_CHECK_TOUCH);
                AutoFocus();
                return false;
            }
        });

        mButtonsLayout = (RelativeLayout) this.findViewById(R.id.buttonsid);

        mStartButton = (Button) this.findViewById(R.id.button1);
        mStartButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0) {
                if (!mIsStartPre) {
                    mIsStartPre = true;
                    AutoFocus();
                    // Initialise the native encoder with a timestamped path.
                    videoinit(buildOutputFileName().getBytes());
                }
            }
        });

        mStopButton = (Button) this.findViewById(R.id.button2);
        mStopButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0) {
                if (mIsStartPre) {
                    mIsStartPre = false;
                    videoclose();
                }
            }
        });

        mIsStartPre = false;
        surfaceHolder = sView.getHolder();
        surfaceHolder.addCallback(this);
        // Required on pre-Honeycomb devices for camera preview surfaces.
        surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        mHandler = new ButtonsHandler();
        mHandler.sendEmptyMessage(MSG_CHECK_PROESS);
        mHandler.sendEmptyMessage(MSG_CHECK_TOUCH);
    }

    /**
     * Builds an output path like {@code /sdcard/2015-8-7-16-30-12.mp4} from
     * the current wall-clock time.
     *
     * FIX: the original used {@code Calendar.MONTH} raw (0-based, so August
     * printed as 7) and {@code Calendar.DAY_OF_YEAR} (1..366) where the day
     * of the month was intended.
     */
    private String buildOutputFileName() {
        Calendar cc = Calendar.getInstance();
        cc.setTimeInMillis(System.currentTimeMillis());
        return Environment.getExternalStorageDirectory().getAbsolutePath() + "/"
                + cc.get(Calendar.YEAR)
                + "-" + (cc.get(Calendar.MONTH) + 1)
                + "-" + cc.get(Calendar.DAY_OF_MONTH)
                + "-" + cc.get(Calendar.HOUR_OF_DAY)
                + "-" + cc.get(Calendar.MINUTE)
                + "-" + cc.get(Calendar.SECOND)
                + ".mp4";
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
    }

    @Override
    public void onPictureTaken(byte[] arg0, Camera arg1) {
        // Still-capture is not used; required by the PictureCallback interface.
    }

    @Override
    public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
        Camera.Parameters p = camera.getParameters();
        // Preview size MUST match the native encoder's mwidth/mheight (352x288),
        // otherwise avcodec_encode_video2 fails on the mismatched buffer.
        p.setPreviewSize(352, 288);
        p.setPictureFormat(PixelFormat.JPEG);
        // Preview frames arrive as YCbCr_420_SP (NV21), the platform default.
        p.setPreviewFormat(PixelFormat.YCbCr_420_SP);
        // p.setRotation(90);
        camera.setPreviewCallback(new PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] arg0, Camera arg1) {
                if (mIsStartPre) {
                    // Hand the raw YUV buffer to the handler, which feeds the
                    // native encoder. Message.obtain() reuses pooled messages.
                    Message msg = Message.obtain();
                    Bundle bl = new Bundle();
                    bl.putByteArray("messageyuvdata", arg0);
                    msg.setData(bl);
                    msg.what = MSG_WRITE_YUVDATA;
                    mHandler.sendMessage(msg);
                }
            }
        });
        camera.setParameters(p);
        try {
            camera.setPreviewDisplay(surfaceHolder);
        } catch (Exception ignored) {
            // Preview simply stays blank if the surface cannot be bound.
        }
        camera.startPreview();
    }

    @Override
    public void surfaceCreated(SurfaceHolder arg0) {
        camera = Camera.open();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder arg0) {
        // FIX: finish the encode before tearing the camera down; previously a
        // surface teardown mid-recording left the native encoder open and the
        // output file truncated.
        if (mIsStartPre) {
            mIsStartPre = false;
            videoclose();
        }
        if (camera != null) {
            camera.setPreviewCallback(null);
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    /** Triggers a one-shot autofocus if the camera is open; result ignored. */
    public void AutoFocus() {
        if (camera != null) {
            camera.autoFocus(new AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean arg0, Camera arg1) {
                    // No action needed on focus completion.
                }
            });
        }
    }

    /**
     * Main-thread handler: hides the button overlay after a 7 s idle timeout
     * (polled every 500 ms), re-shows it on touch, and forwards YUV frames to
     * the native encoder.
     *
     * NOTE(review): encoding runs on the UI thread here; consider a
     * HandlerThread if frames are dropped.
     */
    @SuppressLint("HandlerLeak")
    class ButtonsHandler extends Handler {

        public ButtonsHandler() {
            super();
        }

        public void handleMessage(android.os.Message msg) {
            switch (msg.what) {
            case MSG_CHECK_PROESS:
                if (mIsVisibity
                        && (System.currentTimeMillis() - mVisibityTime > 7000)) {
                    mButtonsLayout.setVisibility(View.INVISIBLE);
                    mIsVisibity = false;
                    mVisibityTime = 0;
                }
                // Re-arm the idle poll.
                sendEmptyMessageDelayed(MSG_CHECK_PROESS, 500);
                break;
            case MSG_CHECK_TOUCH:
                if (mButtonsLayout.getVisibility() != View.VISIBLE) {
                    mButtonsLayout.setVisibility(View.VISIBLE);
                }
                mIsVisibity = true;
                mVisibityTime = System.currentTimeMillis();
                break;
            case MSG_WRITE_YUVDATA:
                byte[] bytedata = msg.getData().getByteArray("messageyuvdata");
                if (bytedata != null) {
                    addVideoData(bytedata);
                }
                break;
            }
        };
    };

    /** Serialised hand-off of one YUV frame to the native encoder. */
    public synchronized void addVideoData(byte[] data) {
        videostart(data);
    }

    /** Opens the native encoder and the output file at {@code filename}. */
    public native int videoinit(byte[] filename);

    /** Encodes one YUV frame previously captured by the preview callback. */
    public native int videostart(byte[] yuvdata);

    /** Flushes and closes the native encoder and output file. */
    public native int videoclose();

    static {
        System.loadLibrary("ffmpeg");
        System.loadLibrary("ffmpeg_encoder_jni");
    }

}


載入編譯好的兩個庫,在ffmpeg_encoder_jni庫中定義了三個native介面,這裡需要呼叫。

開啟攝像頭,點選開始就是呼叫videoinit,開始初始化編碼環境。

@Override
public void onPreviewFrame(byte[] arg0, Camera arg1) {
// TODO Auto-generated method stub
if (mIsStartPre == true) {
Message msg = new Message();
Bundle bl = new Bundle();
bl.putByteArray("messageyuvdata", arg0);
msg.setData(bl);
msg.what = MSG_WRITE_YUVDATA;
mHandler.sendMessage(msg);
}
}


這個回撥介面就是將每一幀的畫面資料擷取丟給ffmpeg處理。我們傳送訊息給handler,處理,實際上就是呼叫videostart處理,這個函式就是將每一幀的資料丟給ffmpeg編碼處理。

結束就呼叫videoclose,清除初始化的資源。

上層很簡單。

好了,那我們來看看native的介面實現:

#include <stdio.h>
#include <time.h> 


#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/log.h"


#ifdef ANDROID
#include <jni.h>
#include <android/log.h>
#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO,  "(^_^)", format, ##__VA_ARGS__)
#else
#define LOGE(format, ...)  printf("(>_<) " format "\n", ##__VA_ARGS__)
#define LOGI(format, ...)  printf("(^_^) " format "\n", ##__VA_ARGS__)
#endif




/* Global encoder state shared by the three JNI entry points below.
 * NOTE(review): none of this is thread-safe; the Java side serialises
 * calls through a synchronized method. */
AVCodecContext *pCodecCtx= NULL;  /* created in videoinit, freed in videoclose */
AVPacket avpkt;  /* reused output packet; (re)initialised per frame */
FILE * video_file;  /* raw MPEG-4 elementary stream (not a real .mp4 container) */
unsigned char *outbuf=NULL;  /* allocated in videoinit; not used by the encode path */
unsigned char *yuv420buf=NULL;  /* allocated in videoinit; not used by the encode path */
AVFrame * yuv420pframe = NULL;  /* per-frame AVFrame managed by videostart */
static int outsize=0;  /* = mwidth * mheight * 2 */
static int mwidth = 352;  /* must match the Java-side setPreviewSize(352, 288) */
static int mheight = 288;
int count = 0;  /* running pts counter, one tick per encoded frame */
/*
 * Encoder initialisation: find the MPEG-4 encoder, configure it for the
 * fixed preview geometry (mwidth x mheight, YUV420P, 25 fps) and open the
 * output file.
 *
 * filename: path bytes from Java String.getBytes() — NOT NUL-terminated,
 *           so they are copied into a terminated C buffer before fopen().
 * Returns 1 on success, -1 on any failure.
 */
JNIEXPORT jint JNICALL Java_com_hua_cameraandroidtest_MainActivity_videoinit(JNIEnv * env, jclass obj, jbyteArray filename)
{
    LOGI("%s\n", __func__);
    AVCodec *pCodec = NULL;
    avcodec_register_all();
    pCodec = avcodec_find_encoder(AV_CODEC_ID_MPEG4);  /* AV_CODEC_ID_H264 / AV_CODEC_ID_MPEG1VIDEO */
    if (pCodec == NULL) {
        LOGE("++++++++++++codec not found\n");
        return -1;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (pCodecCtx == NULL) {
        LOGE("++++++Could not allocate video codec context\n");
        return -1;
    }
    /* put sample parameters */
    pCodecCtx->bit_rate = 450000;
    /* resolution must be a multiple of two */
    pCodecCtx->width = mwidth;
    pCodecCtx->height = mheight;
    /* frames per second */
    pCodecCtx->time_base = (AVRational){1, 25};
    pCodecCtx->gop_size = 10;   /* emit one intra frame every ten frames */
    pCodecCtx->max_b_frames = 1;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("+++++++Could not open codec\n");
        return -1;
    }
    outsize = mwidth * mheight * 2;
    outbuf = malloc(outsize * sizeof(char));
    yuv420buf = malloc(outsize * sizeof(char));
    /* FIX: the Java byte[] carries no trailing '\0'; the old code passed the
     * raw GetByteArrayElements pointer straight to fopen(), which could read
     * past the end of the array. Copy into a NUL-terminated buffer instead. */
    char path[512];
    jsize pathlen = (*env)->GetArrayLength(env, filename);
    if (pathlen >= (jsize)sizeof(path))
        pathlen = sizeof(path) - 1;
    (*env)->GetByteArrayRegion(env, filename, 0, pathlen, (jbyte *)path);
    path[pathlen] = '\0';
    if ((video_file = fopen(path, "wb")) == NULL) {
        LOGE("++++++++++++open %s failed\n", path);
        return -1;
    }
    /* FIX: reset the pts counter so a second recording does not inherit
     * stale timestamps from the previous one. */
    count = 0;
    return 1;
}
  
/*
 * Encodes one camera preview frame and appends the resulting packet to the
 * output file.
 *
 * yuvdata: the raw preview buffer from onPreviewFrame, sized
 *          width*height*3/2. The frame planes point straight into it —
 *          no copy is made.
 *          NOTE(review): the Java side sets YCbCr_420_SP (NV21, interleaved
 *          VU), but the data is fed to the encoder as planar YUV420P, so
 *          chroma ordering looks wrong — confirm, or convert via sws_scale.
 *
 * FIX vs. original: the per-call av_malloc(y_size) staging buffer was too
 * small for a full picture, was never freed (a leak on every frame), and
 * its avpicture_fill'ed plane pointers were immediately overwritten anyway;
 * it is removed entirely and linesize is set explicitly. The function also
 * now returns a value on every path (it was declared jint but fell off the
 * end) and releases the Java array on the error path.
 *
 * Returns 0 on success, -1 on failure.
 */
JNIEXPORT jint JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart(JNIEnv * env, jclass obj, jbyteArray yuvdata)
{
    int frameFinished = 0, size = 0;
    jbyte *ydata = (jbyte *)(*env)->GetByteArrayElements(env, yuvdata, 0);
    if (ydata == NULL)
        return -1;

    av_init_packet(&avpkt);
    avpkt.data = NULL;  /* packet data will be allocated by the encoder */
    avpkt.size = 0;

    yuv420pframe = avcodec_alloc_frame();
    if (yuv420pframe == NULL) {
        (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, JNI_ABORT);
        return -1;
    }

    int y_size = pCodecCtx->width * pCodecCtx->height;
    yuv420pframe->format = pCodecCtx->pix_fmt;
    yuv420pframe->width = pCodecCtx->width;
    yuv420pframe->height = pCodecCtx->height;
    yuv420pframe->pts = count;
    /* Point the planes directly at the Java buffer: Y, then U, then V. */
    yuv420pframe->data[0] = (uint8_t *)ydata;
    yuv420pframe->data[1] = (uint8_t *)ydata + y_size;
    yuv420pframe->data[2] = (uint8_t *)ydata + y_size * 5 / 4;
    /* Required now that avpicture_fill is gone: bytes per row per plane. */
    yuv420pframe->linesize[0] = pCodecCtx->width;
    yuv420pframe->linesize[1] = pCodecCtx->width / 2;
    yuv420pframe->linesize[2] = pCodecCtx->width / 2;

    size = avcodec_encode_video2(pCodecCtx, &avpkt, yuv420pframe, &frameFinished);
    count++;
    if (size < 0) {
        LOGE("+++++Error encoding frame\n");
        av_frame_free(&yuv420pframe);  /* also NULLs the global */
        (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, JNI_ABORT);
        return -1;
    }
    /* frameFinished may stay 0 while the encoder buffers B-frames. */
    if (frameFinished)
        fwrite(avpkt.data, 1, avpkt.size, video_file);
    av_free_packet(&avpkt);
    av_frame_free(&yuv420pframe);  /* NULLs the global so videoclose is safe */
    /* JNI_ABORT: the buffer was only read, no copy-back needed. */
    (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, JNI_ABORT);
    return 0;
}
  
/*
 * Tears the encoder down: flushes delayed frames still buffered inside the
 * codec (max_b_frames=1 means at least one can be pending), closes the
 * output file and frees the codec context and scratch buffers.
 *
 * FIX vs. original: the old code ran av_freep(&yuv420pframe->data[0]) —
 * data[0] pointed into a Java byte[] that had already been released, and
 * the frame itself had already been freed in videostart, so this was a
 * use-after-free plus an attempt to free foreign memory. The frame is owned
 * and freed per call by videostart; nothing frame-related to do here.
 *
 * Returns 0.
 */
JNIEXPORT jint JNICALL Java_com_hua_cameraandroidtest_MainActivity_videoclose(JNIEnv * env, jclass obj)
{
    /* Drain delayed packets: encode with a NULL frame until empty. */
    if (pCodecCtx != NULL && video_file != NULL) {
        int got = 1;
        while (got) {
            av_init_packet(&avpkt);
            avpkt.data = NULL;
            avpkt.size = 0;
            if (avcodec_encode_video2(pCodecCtx, &avpkt, NULL, &got) < 0)
                break;
            if (got) {
                fwrite(avpkt.data, 1, avpkt.size, video_file);
                av_free_packet(&avpkt);
            }
        }
    }
    if (video_file != NULL) {
        fclose(video_file);
        video_file = NULL;
    }
    if (pCodecCtx != NULL) {
        avcodec_close(pCodecCtx);
        av_free(pCodecCtx);
        pCodecCtx = NULL;
    }
    yuv420pframe = NULL;
    free(outbuf);
    outbuf = NULL;
    free(yuv420buf);  /* FIX: was never freed — leaked per recording session */
    yuv420buf = NULL;
    return 0;
}


ps:這裡不講JNI方面的技術,不知道的可以Google相關的文章和書籍。

看看初始化和結束的函式:

這裡也不多講,不知道的可以去看一下雷霄驊大神的相關部落格

http://blog.csdn.net/leixiaohua1020/article/details/44220151 等等

我這裡主要講一下編碼

yuv420pframe->pts = count;
yuv420pframe->data[0] = ydata;  //PCM Data
yuv420pframe->data[1] = ydata+ y_size;      // U 
yuv420pframe->data[2] = ydata+ y_size*5/4;  // V
    size = avcodec_encode_video2(pCodecCtx, &avpkt, yuv420pframe, &frameFinished);

這裡的ydata就是上層onPreviewFrame回撥獲取的byte資料傳下來的。

最重要的一點,yuv資料的寬度和高度必須和上層顯示的寬度和高度是一致的。否則 avcodec_encode_video2編碼會失敗的。

今天 就先講到這吧,有時間在研究研究ffmpeg在android平臺其他方面的東西吧。

謝謝大家