/*
 * FFMpeg video scaling demo
 * (originally published under the title "FFMpeg實現視訊的縮放").
 */
#ifndef IO_FILE_H
#define IO_FILE_H

#include <stdio.h>   /* header must be self-contained: it uses FILE */

/*
 * Bundles the command-line file parameters together with the opened
 * streams so they can be passed around (and cleaned up) as one unit.
 * The name/size pointers alias argv[] and are never freed; the FILE
 * handles are owned by whoever opened them (see hello()).
 */
typedef struct _IOFile
{
	char *inputName;        /* input file name              */
	char *outputName;       /* output file name             */
	char *inputFrameSize;   /* input frame size, "WxH" text */
	char *outputFrameSize;  /* output frame size, "WxH" text*/

	FILE *iFile;            /* input file stream            */
	FILE *oFile;            /* output file stream           */
} IOFile;

#endif /* IO_FILE_H */
#ifndef _FFMPEG_AV_SCALING_H_
#define _FFMPEG_AV_SCALING_H_

#include 
#include 
#include 

#endif
#ifndef COMMON_H
#define COMMON_H

/* <stdio.h> restored: the demo uses printf/fopen/fread/fwrite/fclose. */
#include <stdio.h>
#include "ffmpeg_av_scaling.h"
#include "IOFile.h"

/* Hand-rolled boolean; NOTE(review): clashes with <stdbool.h>/C23 `bool`
   if that ever gets included — kept as-is for source compatibility. */
typedef int bool;

#define false 0
#define true 1

#endif /* COMMON_H */
#include "common.h"

#define MAX_FRAME_NUM (100)

/*************************************************
	Function:		hello
	Description:	Parse the command line and open the input/output files.
	Called By:		main
	Input:			(in)argc, argv : expected as
					"prog input_file WxH output_file WxH"
	Output:			(out)files : file names, frame-size strings (aliasing
					argv) and the opened FILE handles
	Return:			true  : arguments valid and both files opened
					false : wrong argument count or a file failed to open
					(any file already opened stays in *files so the
					caller's cleanup path can close it)
*************************************************/
static bool hello(int argc, char **argv, IOFile *files)
{
	printf("FFMpeg Scaling Demo.\nCommand format: %s input_file input_frame_size output_file output_frame_size\n", argv[0]);
	if (argc != 5)
	{
		printf("Error: command line error, please re-check.\n");
		return false;
	}

	files->inputName = argv[1];
	files->inputFrameSize = argv[2];
	files->outputName = argv[3];
	files->outputFrameSize = argv[4];

	/* The input is only read and the output only written, so plain
	   "rb"/"wb" is correct; the original "rb+"/"wb+" needlessly
	   requested read+write access (and made a read-only input fail). */
	files->iFile = fopen(files->inputName, "rb");
	if (!files->iFile)
	{
		printf("Error: cannot open input file.\n");
		return false;
	}

	files->oFile = fopen(files->outputName, "wb");
	if (!files->oFile)
	{
		printf("Error: cannot open output file.\n");
		return false;
	}

	return true;
}

/*************************************************
	Function:		read_yuv_from_ifile
	Description:	Read one color plane of a YUV420P frame from the
					input file into the (possibly padded) plane buffer.
	Calls:			fread
	Called By:		main
	Input:			(in)srcWidth : width of the input image
					(in)srcHeight : height of the input image
					(in)color_plane : plane index: 0 - Y; 1 - U; 2 - V
					(in)files : structure holding the opened input file
	Output:			(out)src_data : per-plane destination buffers
					(out)src_linesize : per-plane stride in bytes
	Return:			number of bytes actually read: frame_size on a full
					read, fewer at end of file or on a read error
					(previously the nominal size was returned even when
					fread came up short, hiding EOF from callers)
*************************************************/
static int read_yuv_from_ifile(unsigned char *src_data[4], int src_linesize[4], int srcWidth, int srcHeight, int color_plane, IOFile *files)
{
	/* In YUV420P the chroma planes are half the luma size on each axis. */
	int frame_height = color_plane == 0 ? srcHeight : srcHeight / 2;
	int frame_width = color_plane == 0 ? srcWidth : srcWidth / 2;
	int frame_size = frame_width * frame_height;
	int frame_stride = src_linesize[color_plane];
	int row_idx;
	size_t bytes_read = 0;

	/*
	 * linesize = width + alignment padding for YUV planes; when they
	 * are equal the plane is contiguous in the file image too.
	 */
	if (frame_width == frame_stride)
	{
		/* Width equals stride: read the whole plane in one call. */
		bytes_read = fread(src_data[color_plane], 1, frame_size, files->iFile);
	}
	else
	{
		/* Stride exceeds width: read row by row so the padding bytes
		   at the end of each buffer line are skipped, not filled. */
		for (row_idx = 0; row_idx < frame_height; row_idx++)
		{
			bytes_read += fread(src_data[color_plane] + row_idx * frame_stride,
					    1, frame_width, files->iFile);
		}
	}

	return (int)bytes_read;
}

/*
 * Entry point: parse arguments, set up a swscale context, then read up
 * to MAX_FRAME_NUM YUV420P frames, rescale each one and write it out.
 * Returns 0 on success, 1 on any failure.
 */
int main(int argc, char **argv)
{
	int ret = 0;
	int srcWidth, srcHeight;    /* parsed input frame dimensions  */
	int dstWidth, dstHeight;    /* parsed output frame dimensions */
	int exit_code = 1;          /* pessimistic default; 0 on success */

	/*
	 * Everything the FF_END cleanup path touches is declared and
	 * zero-initialized up front.  The original jumped to FF_END with
	 * sws_ctx/src_data/dst_data uninitialized and files possibly NULL,
	 * making fclose/av_freep/sws_freeContext undefined behavior.
	 */
	struct SwsContext *sws_ctx = NULL;
	unsigned char *src_data[4] = {NULL}, *dst_data[4] = {NULL};
	int src_linesize[4], dst_linesize[4];

	/* Parse the command line and open the input/output files. */
	IOFile files = {NULL};
	if (!hello(argc, argv, &files))
	{
		goto FF_END;
	}

	/* Parse "WxH" strings (e.g. "320x240") into width/height pairs. */
	if (av_parse_video_size(&srcWidth, &srcHeight, files.inputFrameSize))
	{
		printf("Error:parsing input size failed.\n");
		goto FF_END;
	}

	if (av_parse_video_size(&dstWidth, &dstHeight, files.outputFrameSize))
	{
		printf("Error:parsing output size failed.\n");
		goto FF_END;
	}

	enum AVPixelFormat src_pix_fmt = AV_PIX_FMT_YUV420P;
	enum AVPixelFormat dst_pix_fmt = AV_PIX_FMT_YUV420P;

	/*
	 * Create the scaling context: input geometry/format, output
	 * geometry/format, SWS_BILINEAR as the rescaling algorithm (only
	 * relevant when the sizes differ).  The three trailing NULLs are
	 * the optional source filter, destination filter and per-algorithm
	 * parameters, none of which are needed here.
	 */
	printf("srcw:%d,h:%d,dstw:%d,h:%d\n", srcWidth, srcHeight, dstWidth, dstHeight);
	sws_ctx = sws_getContext(srcWidth, srcHeight, src_pix_fmt,
				 dstWidth, dstHeight, dst_pix_fmt,
				 SWS_BILINEAR, NULL, NULL, NULL);
	if (!sws_ctx)
	{
		printf("Error: allocating SwsContext struct failed.\n");
		goto FF_END;
	}

	/*
	 * Allocate the source and destination image planes.  av_image_alloc
	 * fills data[]/linesize[] and returns the total buffer size.  The
	 * destination uses alignment 1 so its buffer size equals the exact
	 * packed frame size written below.
	 */
	if ((ret = av_image_alloc(src_data, src_linesize, srcWidth, srcHeight, src_pix_fmt, 32)) < 0)
	{
		printf("Error:allocating src image failed.\n");
		goto FF_END;
	}

	if ((ret = av_image_alloc(dst_data, dst_linesize, dstWidth, dstHeight, dst_pix_fmt, 1)) < 0)
	{
		printf("Error:allocating dst image failed.\n");
		goto FF_END;
	}

	int dst_bufsize = ret;   /* exact output frame size (alignment 1) */
	int idx;
	for (idx = 0; idx < MAX_FRAME_NUM; idx++)
	{
		/* Read the Y, U and V planes of one YUV420P frame. */
		read_yuv_from_ifile(src_data, src_linesize, srcWidth, srcHeight, 0, &files);
		read_yuv_from_ifile(src_data, src_linesize, srcWidth, srcHeight, 1, &files);
		read_yuv_from_ifile(src_data, src_linesize, srcWidth, srcHeight, 2, &files);
		if (feof(files.iFile))
		{
			/* Input exhausted: stop instead of rescaling stale data
			   for the remainder of the MAX_FRAME_NUM iterations. */
			break;
		}

		/* Rescale the frame from source geometry to destination geometry. */
		sws_scale(sws_ctx, (const unsigned char * const *)src_data, src_linesize,
			  0, srcHeight, dst_data, dst_linesize);

		if (fwrite(dst_data[0], 1, dst_bufsize, files.oFile) != (size_t)dst_bufsize)
		{
			printf("Error: writing output frame failed.\n");
			goto FF_END;
		}
	}
	printf("Video scaling succeeded.\n");
	exit_code = 0;

FF_END:
	/* fclose(NULL) is UB, so the streams are guarded; the buffers and
	   context were zero-initialized, and av_freep(NULL*)/
	   sws_freeContext(NULL) are documented no-ops. */
	if (files.iFile)
		fclose(files.iFile);
	if (files.oFile)
		fclose(files.oFile);
	av_freep(&src_data[0]);
	av_freep(&dst_data[0]);
	sws_freeContext(sws_ctx);

	return exit_code;
}