
Controlling the Output Frame Rate of the Android Camera Preview

If you hardware-encode H.264 with MediaCodec, the encoder's output frame rate can be set through the following configuration:

        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);   // rough bitrate heuristic
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);                 // target frame rate
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);            // one key frame per second
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (IOException e) {
            e.printStackTrace();
        }
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
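Not part of the original post: a minimal sketch of how frames might then be fed to this encoder, using the pre-API-21 input-buffer style that matches the configuration above. Here frameData (assumed to already be in the semi-planar YUV layout) and frameIndex are hypothetical variables, and the presentation timestamp is derived from the configured 30 fps:

        // Sketch only: submit one YUV frame (assumed already converted to
        // COLOR_FormatYUV420SemiPlanar layout) to the encoder's input queue.
        int inIndex = mediaCodec.dequeueInputBuffer(10000);          // wait up to 10 ms
        if (inIndex >= 0) {
            ByteBuffer inputBuffer = mediaCodec.getInputBuffers()[inIndex];
            inputBuffer.clear();
            inputBuffer.put(frameData);                              // assumed byte[] frame
            long ptsUs = frameIndex * 1000000L / 30;                 // timestamps paced at KEY_FRAME_RATE
            mediaCodec.queueInputBuffer(inIndex, 0, frameData.length, ptsUs, 0);
            frameIndex++;                                            // assumed long counter
        }

Note that KEY_FRAME_RATE mainly informs the encoder's rate control; how many frames actually reach the encoder still depends on how often frames are submitted, which is exactly what the rest of this article is about.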

But if you instead grab preview frames in preview mode and encode them with x264, the application layer has to control the preview frame rate itself. Camera does offer two interfaces for this, yet adaptation work across several platforms shows that neither of them reliably limits the preview output frame rate:

setPreviewFrameRate has existed since API level 1, but simply calling it does not make the camera capture that many frames per second. For example, setting it to 2 does not limit capture to 2 fps; judging from the logs, the result is nowhere near the expected 2 frames. Checking the documentation, the method has in fact been deprecated.

API level 9 added setPreviewFpsRange(int min, int max), which bounds the preview frame rate between min and max; both values are the frame rate multiplied by 1000. In newer SDKs this method replaces the old setPreviewFrameRate.
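As a point of reference, a minimal sketch of how setPreviewFpsRange is normally used: pick one of the ranges reported by getSupportedPreviewFpsRange instead of inventing values (the camera variable and the 15 fps target are assumptions):

        Camera.Parameters params = camera.getParameters();
        int targetFps = 15 * 1000;                       // values are fps * 1000
        int[] chosen = null;
        for (int[] range : params.getSupportedPreviewFpsRange()) {
            // range[0] = min, range[1] = max, both scaled by 1000
            if (range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] <= targetFps
                    && targetFps <= range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]) {
                chosen = range;
                break;
            }
        }
        if (chosen != null) {
            params.setPreviewFpsRange(chosen[0], chosen[1]);
            camera.setParameters(params);
        }

Even with a supported range applied, many devices treat it only as a hint, which is the problem described above; that is why the callback-side throttling in the update below is still useful.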

==================== Update 2017-05-15 ====================

I originally overcomplicated this problem; simply throttling inside the preview-frame callback achieves the goal. Code:

	private final static int MAX_FPS = 15;                     // 15 fps is enough for a video call
	private final static int FRAME_PERIOD = (1000 / MAX_FPS); // the frame period in ms
	long lastTime = 0;
	long timeDiff = 0;
	int framesSkipped = 0;   // number of frames dropped by the throttle
	int framesReceived = 0;  // number of frames delivered by the camera
	int framesSent = 0;      // number of frames actually passed on

	private PreviewCallback previewCallback = new PreviewCallback() {

		public void onPreviewFrame(byte[] _data, Camera _camera) {
			timeDiff = System.currentTimeMillis() - lastTime;
			framesReceived++;
			if (timeDiff < FRAME_PERIOD) {
				// Too soon since the last accepted frame: drop it and recycle the buffer.
				framesSkipped++;

				if (NgnProxyVideoProducer.sAddCallbackBufferSupported) {
					// do not use "_data" which could be null (e.g. on GSII)
					NgnCameraProducer.addCallbackBuffer(_camera,
							_data == null ? mVideoCallbackData : _data);
				}
				Log.d(TAG, "framesSkipped:" + framesSkipped + ", framesReceived:" + framesReceived + ", framesSent:" + framesSent);
				return;
			}
			lastTime = System.currentTimeMillis();
			framesSent++;

			// ... hand the frame to the encoder here.
		}
	};
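This callback only has an effect once it is registered with the camera. The original code relies on the NgnCameraProducer wrapper for that; as a point of reference, a minimal sketch with the plain android.hardware.Camera API (the camera variable and the buffer count of 3 are assumptions) would look like this:

	// Sketch only: allocate a few rotating preview buffers and register the
	// throttled callback through the buffered-callback path.
	Camera.Parameters p = camera.getParameters();
	Camera.Size previewSize = p.getPreviewSize();
	int bufferSize = previewSize.width * previewSize.height
			* ImageFormat.getBitsPerPixel(p.getPreviewFormat()) / 8;
	for (int i = 0; i < 3; i++) {
		camera.addCallbackBuffer(new byte[bufferSize]);   // buffers are recycled via addCallbackBuffer()
	}
	camera.setPreviewCallbackWithBuffer(previewCallback);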


==================== Update 2017-05-15 ====================


The section above, from the article "Android camera 預覽幀數和視訊通話圖片快取", explains why setting the preview frame rate has no effect and gives one way to throttle preview frames. Here is another, similar approach: a lock-free ring queue.

The RingBuffer definition:

	final byte STATU_INIT = 0;         // slot is free and may be written
	final byte STATU_WAIT_DEQEUE = 1;  // slot holds a frame waiting to be consumed
	class UserDefinedBuffer{
		ByteBuffer mVideoFrame;
		byte status;
	}
	// the ring queue: one writer (the preview callback) and one reader (the push thread)
	class RingBuffer{
		int r_index;                    // read position, advanced only by the consumer
		int w_index;                    // write position, advanced only by the producer
		int size;                       // number of slots; must be a power of two
		UserDefinedBuffer[] mUserDefinedBuffer;
		long last_time;

		public RingBuffer(int max_size, int capacity){
			// max_size slots, each pre-allocated with one direct buffer of "capacity" bytes
			mUserDefinedBuffer = new UserDefinedBuffer[max_size];
			r_index = w_index = 0;
			size = max_size;
			for(int i = 0; i < max_size; i++){
				mUserDefinedBuffer[i] = new UserDefinedBuffer();
				mUserDefinedBuffer[i].mVideoFrame = ByteBuffer.allocateDirect(capacity);
			}
		}
		
		public UserDefinedBuffer getUserDefinedBuffer(int index){
			return mUserDefinedBuffer[index];
		}
		
		int getRingW(){
			return w_index;
		}
		
		int getRingR(){
			return r_index;
		}
		int getRingSize(){
			return size;
		}
		  
		void  setUserDefinedBufferStatus(int index, byte status){
			synchronized(mUserDefinedBuffer[index]){
				mUserDefinedBuffer[index].status = status;
			}
		}
		
		byte getUserDefinedBufferStatus(int index){
			synchronized(mUserDefinedBuffer[index]){
				return mUserDefinedBuffer[index].status;
			} 
		}
		
		void enqueue(byte[] _data){
			// size is a power of two, so the mask keeps the index inside [0, size)
			int index = w_index & (size - 1);
			Log.i(TAG, "enqueue, index:" + index);
			if (getUserDefinedBufferStatus(index) != STATU_INIT){
				// the consumer has not drained this slot yet: drop the frame
				Log.i(TAG, "enqueue, index:" + index + ", not dequeued yet, STATUS:" + getUserDefinedBufferStatus(index));
				return;
			}
			setUserDefinedBufferStatus(index, STATU_WAIT_DEQEUE);
			mUserDefinedBuffer[index].mVideoFrame.rewind();
			mUserDefinedBuffer[index].mVideoFrame.put(_data);
			w_index += 1;
		}
		
		void enqueue(ByteBuffer data){
			int index = w_index & (size - 1);
			Log.i(TAG, "enqueue, index:" + index);
			if (getUserDefinedBufferStatus(index) != STATU_INIT){
				// the consumer has not drained this slot yet: drop the frame
				Log.i(TAG, "enqueue, index:" + index + ", not dequeued yet, STATUS:" + getUserDefinedBufferStatus(index));
				return;
			}
			setUserDefinedBufferStatus(index, STATU_WAIT_DEQEUE);
			mUserDefinedBuffer[index].mVideoFrame.rewind();
			mUserDefinedBuffer[index].mVideoFrame.put(data);
			w_index += 1;
			//last_time = System.currentTimeMillis();
		}
		
		long getLastTime(){
			return last_time;
		}
		
		int dequeue(){
			int index = r_index & (size - 1);
			if (index == (w_index & (size - 1))){
				// read position has caught up with write position: the queue is empty
				Log.i(TAG, "dequeue, w_index:" + w_index + ", r_index:" + r_index);
				return -1;
			}
			Log.i(TAG, "dequeue, index:" + index);

			r_index += 1;
			return index;
		}
	}
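One remark on the design (my own note, not part of the original article): the queue stays lock-free because only the preview callback advances w_index and only the push thread advances r_index, while the per-slot status, accessed under a tiny per-slot lock, hands slots back and forth. Strictly speaking, the plain int indices have no guaranteed cross-thread visibility under the Java memory model; if that ever shows up in practice, marking them volatile is a minimal fix:

	// Sketch only: single-writer-per-field indices made volatile for cross-thread visibility.
	volatile int r_index;   // advanced only by the dequeue thread
	volatile int w_index;   // advanced only by the preview callback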

The dequeue thread:

	class PushVideoThread extends Thread{  
		boolean mExitFlag = false;  
		
		public void setExitFlg(boolean bFlag){
			mExitFlag = bFlag;
		}
		 
	    @Override
	    public void run() {
			android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

	    	Log.i(TAG, "PushVideoThread() run start.");
			final int delay = (1000 / mFps);   // frame period in ms, so pushes are paced at mFps
			while (!mExitFlag) {
				long start = System.currentTimeMillis();
				if (mRingBuffer == null) {
					try {
						Thread.sleep(delay);
					} catch (Exception e) {
						e.printStackTrace();
					}
					continue;
				}
				// take the next filled slot; -1 means the queue is empty
				int index = mRingBuffer.dequeue();
				if (index == -1) {
					try {
						Thread.sleep(delay);
					} catch (Exception e) {
						e.printStackTrace();
					}
					continue;
				}
				if (STATU_WAIT_DEQEUE != mRingBuffer.getUserDefinedBufferStatus(index)) {
					Log.i(TAG, "dequeue, unexpected slot status:" + mRingBuffer.getUserDefinedBufferStatus(index));
					try {
						Thread.sleep(delay);
					} catch (Exception e) {
						e.printStackTrace();
					}
					continue;
				}
				UserDefinedBuffer userDefinedBuffer = mRingBuffer.getUserDefinedBuffer(index);

				ByteBuffer byteBuffer = userDefinedBuffer.mVideoFrame;
				if (byteBuffer != null) {
					framesRendered++;
					if ((framesRendered % 100) == 0) {
						logStatistics();
						framesRendered = 0;
						startTimeNs = System.nanoTime();
					}
					// hand the frame to the producer, then mark the slot as free again
					mProducer.push(byteBuffer, mVideoFrame.capacity());
					mRingBuffer.setUserDefinedBufferStatus(index, STATU_INIT);
				}

				// sleep away the rest of the frame period so pushes happen at ~mFps
				long end = System.currentTimeMillis();
				if ((end - start) < delay && (end - start) > 0) {
					try {
						long value = delay - (end - start);
						if (value > 0) {
							Thread.sleep(value);
						}
					} catch (Exception e) {
						e.printStackTrace();
					}
				}
			}
			Log.i(TAG, "PushVideoThread() run End.");
		}
	}

Initializing the RingBuffer:
	static final int MAX_SIZE = 64;    // must be a power of two (the index mask size - 1 relies on this)
	RingBuffer mRingBuffer;	 
	void initRingBuffer(){   
		mRingBuffer = new RingBuffer(MAX_SIZE, mVideoFrame.capacity()); 
	}
	
Enqueueing from the preview callback:

	public void onPreviewFrame(byte[] _data, Camera _camera) {
		mRingBuffer.enqueue(_data);
	}

