Playing MP4 audio and video in C# with SDL2

The key point for playback: the video stream is H264 and the audio stream is AAC. FFmpeg is used to probe the streams and decode them for playback.

Data processing flow: H264 -> YUV for video, AAC -> PCM for audio.
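
In FFmpeg.AutoGen terms, those two conversions map to one sws (video scaler) context and one swr (audio resampler) context. The following is only a condensed sketch of that setup: the parameters with the In suffix stand in for fields read from the opened decoder contexts, and the complete versions appear in the classes later in this post (assumes `using FFmpeg.AutoGen;`).

    static unsafe void CreateConverters(
        int width, int height, AVPixelFormat pixFmtIn,
        long channelLayoutIn, AVSampleFormat sampleFmtIn, int sampleRateIn)
    {
        // Video: decoded H264 frames -> YUV420P, the layout SDL's IYUV texture expects
        SwsContext* sws = ffmpeg.sws_getContext(
            width, height, pixFmtIn,
            width, height, AVPixelFormat.AV_PIX_FMT_YUV420P,
            ffmpeg.SWS_BICUBIC, null, null, null);

        // Audio: decoded AAC frames -> 16-bit mono PCM for SDL_OpenAudio
        SwrContext* swr = ffmpeg.swr_alloc();
        ffmpeg.swr_alloc_set_opts(swr,
            ffmpeg.AV_CH_LAYOUT_MONO, AVSampleFormat.AV_SAMPLE_FMT_S16, 8000,
            channelLayoutIn, sampleFmtIn, sampleRateIn, 0, null);
        ffmpeg.swr_init(swr);
        // Real code keeps sws/swr in fields and frees them when playback stops.
    }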

SDL2 helper classes

using SDL2;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;

namespace CvNetVideo
{
    public unsafe class SDLHelper
    {
        private IntPtr screen;
        private IntPtr sdlrenderer;
        private IntPtr sdltexture;
        SDL.SDL_Rect sdlrect;
        SDL.SDL_Event sdlevent;
        bool isInit = false;
        public SDLHelper()
        {
        }

        public void SDL_MaximizeWindow()
        {
        }

        public int SDL_Init(int width, int height, IntPtr intPtr)
        {
            lock (this)
            {
                if (!isInit)
                {
                    // Initialize SDL with the video, audio and timer subsystems
                    if (SDL.SDL_Init(SDL.SDL_INIT_VIDEO | SDL.SDL_INIT_AUDIO | SDL.SDL_INIT_TIMER) < 0)
                    {
                        Console.WriteLine("Could not initialize SDL - {0}\n", SDL.SDL_GetError());
                        return -1;
                    }
                    isInit = true;
                }
                #region SDL setup
                // Tear down any previously created SDL objects before re-initializing
                if (sdltexture != IntPtr.Zero)
                {
                    SDL.SDL_DestroyTexture(sdltexture);
                }
                if (sdlrenderer != IntPtr.Zero)
                {
                    SDL.SDL_DestroyRenderer(sdlrenderer);
                }
                if (screen != IntPtr.Zero)
                {
                    SDL.SDL_DestroyWindow(screen);
                }
                // Create the display window from an existing control handle
                screen = SDL.SDL_CreateWindowFrom(intPtr);
                SDL.SDL_ShowWindow(screen);

                SDL.SDL_SetWindowSize(screen, width, height);
                //screen = SDL.SDL_CreateWindow("SDL EVENT TEST", SDL.SDL_WINDOWPOS_UNDEFINED, SDL.SDL_WINDOWPOS_UNDEFINED, width, height, SDL.SDL_WindowFlags.SDL_WINDOW_OPENGL | SDL.SDL_WindowFlags.SDL_WINDOW_RESIZABLE);
                //screen = SDL.SDL_CreateWindow("SDL EVENT TEST", SDL.SDL_WINDOWPOS_UNDEFINED, SDL.SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h, SDL.SDL_WindowFlags.SDL_WINDOW_OPENGL | SDL.SDL_WindowFlags.SDL_WINDOW_RESIZABLE);
                if (screen == IntPtr.Zero)
                {
                    Console.WriteLine("Can't creat a window:{0}\n", SDL.SDL_GetError());
                    return -1;
                }

                // Create the renderer
                sdlrenderer = SDL.SDL_CreateRenderer(screen, -1, SDL.SDL_RendererFlags.SDL_RENDERER_ACCELERATED);
                // Create the texture (IYUV, streaming access)
                sdltexture = SDL.SDL_CreateTexture(sdlrenderer, SDL.SDL_PIXELFORMAT_IYUV, (int)SDL.SDL_TextureAccess.SDL_TEXTUREACCESS_STREAMING, width, height);
                #endregion

                return 0;
            }
        }


        public int SDL_Display(int width, int height, IntPtr pixels, int pixelsSize,
            int pitch)
        {
            lock (this)
            {
                #region SDL video rendering
                // Set the target rectangle for the texture
                sdlrect.x = 0;
                sdlrect.y = 0;
                sdlrect.w = width;
                sdlrect.h = height;
                //SDL.SDL_UpdateTexture(sdltexture, ref sdlrect, pixels, pitch);
                SDL.SDL_UpdateTexture(sdltexture, IntPtr.Zero, pixels, pitch);
                // Clear the renderer, then copy the texture to the render target
                SDL.SDL_RenderClear(sdlrenderer);
                //SDL.SDL_Rect srcRect = sdlrect;
                //SDL.SDL_RenderCopy(sdlrenderer, sdltexture, ref srcRect, ref sdlrect);
                SDL.SDL_RenderCopy(sdlrenderer, sdltexture, IntPtr.Zero, IntPtr.Zero);
                // Present the rendered frame
                SDL.SDL_RenderPresent(sdlrenderer);
                #endregion
                return 0;
            }
        }
    }
    public unsafe class SDLAudio
    {
        class aa
        {
            public byte[] pcm;
            public int len;
        }
        int lastIndex = 0;

        private List<aa> data = new List<aa>();

        //private List<byte> data = new List<byte>();
        SDL.SDL_AudioCallback Callback;
        public void PlayAudio(IntPtr pcm, int len)
        {
            lock (this)
            {
                byte[] bts = new byte[len];
                Marshal.Copy(pcm, bts, 0, len);
                data.Add(new aa
                {
                    len = len,
                    pcm = bts
                });
            }

            //SDL.SDL_Delay(10);
        }
        void SDL_AudioCallback(IntPtr userdata, IntPtr stream, int len)
        {
            // The callback runs on SDL's audio thread, so guard the shared buffer list
            lock (this)
            {
                if (data.Count == 0)
                {
                    // No PCM queued: output silence
                    for (int i = 0; i < len; i++)
                    {
                        ((byte*)stream)[i] = 0;
                    }
                    return;
                }
                // Copy the oldest queued PCM block, padding with silence if it is short
                for (int i = 0; i < len; i++)
                {
                    if (data[0].len > i)
                        ((byte*)stream)[i] = data[0].pcm[i];
                    else
                        ((byte*)stream)[i] = 0;
                }
                data.RemoveAt(0);
            }
        }
        public int SDL_Init()
        {
            Callback = SDL_AudioCallback;
            #region SDL initialization
            //// SDL itself is initialized by SDLHelper.SDL_Init (video | audio | timer),
            //// so the call is not repeated here.
            //if (SDL.SDL_Init(SDL.SDL_INIT_VIDEO | SDL.SDL_INIT_AUDIO | SDL.SDL_INIT_TIMER) < 0)
            //{
            //    Console.WriteLine("Could not initialize SDL - {0}\n", SDL.SDL_GetError());
            //    return -1;
            //}
            #endregion


            SDL.SDL_AudioSpec wanted_spec = new SDL.SDL_AudioSpec();
            wanted_spec.freq = 8000;
            wanted_spec.format = SDL.AUDIO_S16;
            wanted_spec.channels = 1;
            wanted_spec.silence = 0;
            wanted_spec.samples = 320;
            wanted_spec.callback = Callback;


            if (SDL.SDL_OpenAudio(ref wanted_spec, IntPtr.Zero) < 0)
            {
                Console.WriteLine("can't open audio.");
                return -1;
            }
            //Play  
            SDL.SDL_PauseAudio(0);
            return 0;
        }

    }
   

}
This provides the basic SDL playback functionality.
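
Hooking the two helpers up from a WinForms form looks roughly like the sketch below. This is a minimal example and not part of the original project: the form, the Panel named videoPanel and InitializeComponent are assumed to exist; SDLHelper and SDLAudio are the classes above.

    using System;
    using System.Windows.Forms;
    using CvNetVideo;

    public partial class PlayerForm : Form
    {
        private readonly SDLHelper sdlVideo = new SDLHelper();
        private readonly SDLAudio sdlAudio = new SDLAudio();

        public PlayerForm()
        {
            InitializeComponent();
            // Let SDL render into the panel by handing over its native window handle
            if (sdlVideo.SDL_Init(videoPanel.Width, videoPanel.Height, videoPanel.Handle) != 0)
                MessageBox.Show("SDL video init failed");
            // Open the audio device; the SDL callback pulls PCM queued via PlayAudio
            if (sdlAudio.SDL_Init() != 0)
                MessageBox.Show("SDL audio init failed");
        }
    }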

C# MP4 file audio/video decoding class

using CV.Video.Base;
using CV.Video.Base.FFmpeg;
using FFmpeg.AutoGen;
using JX;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace CvNetVideo.Codec.Video
{
    public unsafe class JT1078CodecForMp4
    {
       
        /// <summary>
        /// Indicates whether decoding is currently running
        /// </summary>
        public bool IsRun { get; protected set; }
        /// <summary>
        /// Video thread
        /// </summary>
        private Thread threadVideo;
        /// <summary>
        /// Audio thread
        /// </summary>
        private Thread threadAudio;
        /// <summary>
        /// Exit flag
        /// </summary>
        private bool exit_thread = false;
        /// <summary>
        /// Pause flag
        /// </summary>
        private bool pause_thread = false;
        /// <summary>
        /// Index of the video output stream
        /// </summary>
        private int videoindex = -1;
        /// <summary>
        /// Index of the audio output stream
        /// </summary>
        private int audioindex = -1;

        /// <summary>
        /// Decode the video (H264 to YUV) and play it with SDL
        /// </summary>
        /// <param name="fileName"></param>
        /// <param name="sdlVideo"></param>
        /// <returns></returns>
        public unsafe int RunVideo(string fileName,SDLHelper sdlVideo)
        {
            IsRun = true;
            exit_thread = false;
            pause_thread = false;
            threadVideo = Thread.CurrentThread;
            int error, frame_count = 0;
            int got_picture, ret;
            SwsContext* pSwsCtx = null;
            AVFormatContext* ofmt_ctx = null;
            IntPtr convertedFrameBufferPtr = IntPtr.Zero;
            try
            {
                // Register all codecs
                ffmpeg.avcodec_register_all();

                // Allocate the format context
                ofmt_ctx = ffmpeg.avformat_alloc_context();

                // Open the media file
                error = ffmpeg.avformat_open_input(&ofmt_ctx, fileName, null, null);
                if (error != 0)
                {
                    throw new ApplicationException(FFmpegBinariesHelper.GetErrorMessage(error));
                }

                // Find the video stream index
                for (int i = 0; i < ofmt_ctx->nb_streams; i++)
                {
                    if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                    {
                        videoindex = i;
                        Console.WriteLine("video.............." + videoindex);
                    }
                }

                if (videoindex == -1)
                {
                    Console.WriteLine("Couldn't find a video stream.");
                    return -1;
                }

                // Video stream handling
                if (videoindex > -1)
                {
                    // Get the codec context of the video stream
                    AVCodecContext* pCodecCtx = ofmt_ctx->streams[videoindex]->codec;

                    // Look up the decoder by the codec id from the codec context
                    AVCodec* pCodec = ffmpeg.avcodec_find_decoder(pCodecCtx->codec_id);
                    if (pCodec == null)
                    {
                        Console.WriteLine("No decoder found");
                        return -1;
                    }

                    // Open the decoder
                    if (ffmpeg.avcodec_open2(pCodecCtx, pCodec, null) < 0)
                    {
                        Console.WriteLine("Failed to open the decoder");
                        return -1;
                    }
                    Console.WriteLine("Find a  video stream.channel=" + videoindex);

                    // Print video information (the name fields are C strings, hence PtrToStringAnsi)
                    var format = Marshal.PtrToStringAnsi((IntPtr)ofmt_ctx->iformat->name);
                    var len = (ofmt_ctx->duration) / 1000000;
                    var width = pCodecCtx->width;
                    var height = pCodecCtx->height;
                    Console.WriteLine("video format:" + format);
                    Console.WriteLine("video length:" + len);
                    Console.WriteLine("video width&height:width=" + width + " height=" + height);
                    Console.WriteLine("video codec name:" + Marshal.PtrToStringAnsi((IntPtr)pCodec->name));

                    // Prepare for reading
                    // AVPacket holds the compressed data (H264), one frame at a time
                    AVPacket* packet = (AVPacket*)ffmpeg.av_malloc((ulong)sizeof(AVPacket));

                    // AVFrame holds the decoded pixel data (YUV)
                    AVFrame* pFrame = ffmpeg.av_frame_alloc();
                    // Frame for the YUV420 output
                    AVFrame* pFrameYUV = ffmpeg.av_frame_alloc();
                    // Memory is only really allocated once the pixel format and picture size are known
                    int out_buffer_size = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
                    byte* out_buffer = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size);
                    // Attach the buffer to the YUV frame
                    ffmpeg.avpicture_fill((AVPicture*)pFrameYUV, out_buffer, AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

                    // Scaling/conversion parameters: source width/height/format and destination width/height/format
                    SwsContext* sws_ctx = ffmpeg.sws_getContext(pCodecCtx->width, pCodecCtx->height, AVPixelFormat.AV_PIX_FMT_YUV420P /*pCodecCtx->pix_fmt*/, pCodecCtx->width, pCodecCtx->height, AVPixelFormat.AV_PIX_FMT_YUV420P, ffmpeg.SWS_BICUBIC, null, null, null);

                    while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
                    {
                        // Exit the thread
                        if (exit_thread)
                        {
                            break;
                        }
                        // Pause parsing
                        if (pause_thread)
                        {
                            while (pause_thread)
                            {
                                Thread.Sleep(100);
                            }
                        }
                        // Only handle compressed video data (identified by the stream index)
                        if (packet->stream_index == videoindex)
                        {
                            // Decode one frame of compressed video data into pixel data
                            ret = ffmpeg.avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                            if (ret < 0)
                            {
                                Console.WriteLine("Video decoding error");
                                return -1;
                            }

                            // A complete frame was decoded
                            if (got_picture > 0)
                            {
                                frame_count++;
                                Console.WriteLine("Video frame " + frame_count);

                                // Convert the AVFrame to YUV420 at the target width/height
                                ffmpeg.sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);

                                // Play the YUV data with SDL
                                var data = out_buffer;
                                sdlVideo.SDL_Display(pCodecCtx->width, pCodecCtx->height, (IntPtr)data, out_buffer_size, pFrameYUV->linesize[0]);
                            }
                        }

                        // Release the packet
                        ffmpeg.av_free_packet(packet);
                    }

                }

            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
            finally
            {
                if (ofmt_ctx != null)
                {
                    ffmpeg.avformat_close_input(&ofmt_ctx); // close the input file
                }
            }
            IsRun = false;
            return 0;
        }

        /// <summary>
        /// Decode the audio (AAC to PCM) and play it with SDL
        /// </summary>
        /// <param name="fileName"></param>
        /// <param name="sdlAudio"></param>
        /// <returns></returns>
        public unsafe int RunAudio(string fileName, SDLAudio sdlAudio)
        {
            IsRun = true;
            exit_thread = false;
            pause_thread = false;
            threadAudio = Thread.CurrentThread;
            int error, frame_count = 0;
            int got_frame, ret;
            AVFormatContext* ofmt_ctx = null;
            SwsContext* pSwsCtx = null;
            IntPtr convertedFrameBufferPtr = IntPtr.Zero;
            try
            {
                // Register all codecs
                ffmpeg.avcodec_register_all();

                // Allocate the format context
                ofmt_ctx = ffmpeg.avformat_alloc_context();

                // Open the media file
                error = ffmpeg.avformat_open_input(&ofmt_ctx, fileName, null, null);
                if (error != 0)
                {
                    throw new ApplicationException(FFmpegBinariesHelper.GetErrorMessage(error));
                }

                // Find the audio stream index
                for (int i = 0; i < ofmt_ctx->nb_streams; i++)
                {
                    if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                    {
                        audioindex = i;
                        Console.WriteLine("audio.............." + audioindex);
                    }
                }

                if (audioindex == -1)
                {
                    Console.WriteLine("Couldn't find an audio stream.");
                    return -1;
                }

                // Audio stream handling
                if (audioindex > -1)
                {
                    // Get the stream by index, then the decoder context from the stream
                    AVCodecContext* pCodeCtx = ofmt_ctx->streams[audioindex]->codec;

                    // Look up the decoder by the codec id from the context
                    AVCodec* pCodec = ffmpeg.avcodec_find_decoder(pCodeCtx->codec_id);
                    if (pCodec == null)
                    {
                        Console.WriteLine("No decoder found");
                        return -1;
                    }
                    // Open the decoder
                    if (ffmpeg.avcodec_open2(pCodeCtx, pCodec, null) < 0)
                    {
                        Console.WriteLine("Failed to open the decoder");
                        return -1;
                    }
                    Console.WriteLine("Find a  audio stream. channel=" + audioindex);

                    // Compressed data
                    AVPacket* packet = (AVPacket*)ffmpeg.av_malloc((ulong)(sizeof(AVPacket)));
                    // Decoded data
                    AVFrame* frame = ffmpeg.av_frame_alloc();

                    // Resample everything to a uniform format: 16-bit, 44100 Hz PCM
                    SwrContext* swrCtx = ffmpeg.swr_alloc();
                    // Resampling options ------------------------------------------------------start
                    // Input sample format
                    AVSampleFormat in_sample_fmt = pCodeCtx->sample_fmt;
                    // Output sample format: 16-bit PCM
                    AVSampleFormat out_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
                    // Input sample rate
                    int in_sample_rate = pCodeCtx->sample_rate;
                    // Output sample rate
                    int out_sample_rate = 44100;
                    // Input channel layout
                    long in_ch_layout = (long)pCodeCtx->channel_layout;
                    // Output channel layout
                    int out_ch_layout = ffmpeg.AV_CH_LAYOUT_MONO;

                    ffmpeg.swr_alloc_set_opts(swrCtx, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
                    ffmpeg.swr_init(swrCtx);
                    // Resampling options ------------------------------------------------------end
                    // Number of output channels
                    int out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
                    // Buffer for the PCM data
                    byte* out_buffer = (byte*)ffmpeg.av_malloc(2 * 44100);
                  
                    // Read the compressed audio data one AVPacket at a time
                    while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
                    {
                        // Exit the thread
                        if (exit_thread)
                        {
                            break;
                        }
                        // Pause parsing
                        if (pause_thread)
                        {
                            while (pause_thread)
                            {
                                Thread.Sleep(100);
                            }
                        }
                        if (packet->stream_index == audioindex)
                        {
                            // Decode AVPacket -> AVFrame
                            ret = ffmpeg.avcodec_decode_audio4(pCodeCtx, frame, &got_frame, packet);
                            if (ret < 0)
                            {
                                Console.WriteLine("Audio decoding failed");
                                return -1;
                            }
                            // A complete frame was decoded
                            if (got_frame > 0)
                            {
                                frame_count++;
                                Console.WriteLine("Audio frame " + frame_count);
                                var data_ = frame->data;
                                ffmpeg.swr_convert(swrCtx, &out_buffer, 2 * 44100, (byte**)&data_, frame->nb_samples);
                                // Get the size of the resampled PCM
                                int out_buffer_size = ffmpeg.av_samples_get_buffer_size(null, out_channel_nb, frame->nb_samples, out_sample_fmt, 1);
                                // Hand the PCM to SDL for playback
                                var data = out_buffer;
                                sdlAudio.PlayAudio((IntPtr)data, out_buffer_size);
                            }
                        }
                        ffmpeg.av_free_packet(packet);
                    }

                }

            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
            finally
            {
                if (ofmt_ctx != null)
                {
                    ffmpeg.avformat_close_input(&ofmt_ctx); // close the input file
                }
            }
            IsRun = false;
            return 0;
        }


        /// <summary>
        /// Start the video and audio decode threads
        /// </summary>
        /// <param name="fileName"></param>
        /// <param name="sdlVideo"></param>
        public void Start(string fileName, SDLHelper sdlVideo,SDLAudio sdlAudio)
        {
            // Video thread
            threadVideo = new Thread(() =>
              {
                  try
                  {
                      RunVideo(fileName, sdlVideo);
                  }
                  catch (Exception ex)
                  {
                      SQ.Base.ErrorLog.WriteLog4Ex("JT1078CodecForMp4.Run Video", ex);
                  }
              });
            threadVideo.IsBackground = true;
            threadVideo.Start();

            // Audio thread
            threadAudio = new Thread(() =>
            {
                try
                {
                    RunAudio(fileName, sdlAudio);
                }
                catch (Exception ex)
                {
                    SQ.Base.ErrorLog.WriteLog4Ex("JT1078CodecForMp4.Run Audio", ex);
                }
            });
            threadAudio.IsBackground = true;
            threadAudio.Start();
        }

        /// <summary>
        /// Resume after a pause
        /// </summary>
        public void GoOn()
        {
            pause_thread = false;
        }

        /// <summary>
        /// Pause
        /// </summary>
        public void Pause()
        {
            pause_thread = true;
        }

        /// <summary>
        /// Stop
        /// </summary>
        public void Stop()
        {
            exit_thread = true;
        }
    }
}

Pause, resume and stop are of limited use here, because the file is parsed far faster than real-time playback.

Test code and screenshots

        /// <summary>
        /// Play
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void btnPlay_Click(object sender, EventArgs e)
        {
            // Path of the media file, i.e. ${Project_home}/bin/Debug/test.mp4
            string fileName = "test.mp4";
            // Read and play the audio/video streams on background threads
            jt1078CodecForMp4 = new JT1078CodecForMp4();
            jt1078CodecForMp4.Start(fileName, sdlVideo, sdlAudio);
        }


Note: the green tint that appears here is not normal. Change the way the display method sets the texture data:

        /// <summary>
        /// Display a video frame
        /// </summary>
        /// <param name="width"></param>
        /// <param name="height"></param>
        /// <param name="pixels"></param>
        /// <param name="pixelsSize"></param>
        /// <param name="pitch"></param>
        /// <returns></returns>
        public int SDL_Display(int width, int height, IntPtr pixels, int pixelsSize,
            int pitch)
        {
            lock (this)
            {
                while (isPause)
                {
                    SDL.SDL_Delay(20); // wait while paused
                }

                #region SDL video rendering
                // Set the target rectangle for the texture
                sdlrect.x = 0;
                sdlrect.y = 0;
                sdlrect.w = width;
                sdlrect.h = height;
                SDL.SDL_UpdateTexture(sdltexture, ref sdlrect, pixels, pitch);
                //SDL.SDL_UpdateTexture(sdltexture, IntPtr.Zero, pixels, pitch); // this line caused the green tint in the playback window
                // Clear the renderer, then copy the texture to the render target
                SDL.SDL_RenderClear(sdlrenderer);
                //SDL.SDL_Rect srcRect = sdlrect;
                //SDL.SDL_RenderCopy(sdlrenderer, sdltexture, ref srcRect, ref sdlrect);

                SDL.SDL_RenderCopy(sdlrenderer, sdltexture, IntPtr.Zero, IntPtr.Zero);
                // Present the rendered frame
                SDL.SDL_RenderPresent(sdlrenderer);
                //SDL.SDL_Delay(40);
                //SDL.SDL_PollEvent(out sdlevent);
                //switch (sdlevent.type)
                //{
                //    case SDL.SDL_EventType.SDL_QUIT:
                //        SDL.SDL_Quit();
                //        return -1;
                //    default:
                //        break;
                //}
                return 0;
            }


            #endregion
        }
    }

Key code:

 SDL.SDL_UpdateTexture(sdltexture, ref sdlrect, pixels, pitch);
                //SDL.SDL_UpdateTexture(sdltexture, IntPtr.Zero, pixels, pitch); // this line caused the green tint in the playback window

Effect after the fix:


Code improvement: play audio and video on the same thread:

    /// <summary>
    /// MP4 playback (audio and video on the same thread)
    /// </summary>
    public unsafe class JT1078CodecToPlayMp4Two
    {

        /// <summary>
        /// Indicates whether decoding is currently running
        /// </summary>
        public bool IsRun { get; protected set; }
        /// <summary>
        /// Worker thread
        /// </summary>
        private Thread thread;
        /// <summary>
        /// Exit flag
        /// </summary>
        private bool exit_thread = false;
        /// <summary>
        /// Pause flag
        /// </summary>
        private bool pause_thread = false;
        /// <summary>
        /// Index of the video output stream
        /// </summary>
        private int videoindex = -1;
        /// <summary>
        /// Index of the audio output stream
        /// </summary>
        private int audioindex = -1;

        private bool isInit = false;


        int error;
        AVFormatContext* ofmt_ctx = null;
        AVPacket* packet;
        AVCodecContext* pCodecCtx_Video;
        AVCodec* pCodec_Video;
        AVFrame* pFrame_Video;
        AVFrame* pFrameYUV_Video;
        SwsContext* sws_ctx_video;
        SDLHelper sdlVideo;
        SDLAudio sdlAudio;

        int out_buffer_size_video;
        byte* out_buffer_video;
        int video_frame_count, audio_frame_count;


        AVCodecContext* pCodeCtx_Audio;
        AVCodec* pCodec_Audio;
        AVFrame* frame_Audio;
        SwrContext* swrCtx_Audio;

        byte* out_buffer_audio;
        int out_buffer_size_audio;
        int out_channel_nb;
        AVSampleFormat out_sample_fmt;

        /// <summary>
        /// Initialize
        /// </summary>
        /// <param name="fileName"></param>
        /// <param name="sdlVideo"></param>
        /// <param name="sdlAudio"></param>
        /// <returns></returns>
        public int Init(string fileName, SDLHelper sdlVideo, SDLAudio sdlAudio)
        {
            AVFormatContext* ofmt_ctx;

            // Register all codecs
            ffmpeg.avcodec_register_all();

            // Allocate the format context
            ofmt_ctx = ffmpeg.avformat_alloc_context();
            this.ofmt_ctx = ofmt_ctx;

            // Open the media file
            error = ffmpeg.avformat_open_input(&ofmt_ctx, fileName, null, null);
            if (error != 0)
            {
                throw new ApplicationException(FFmpegBinariesHelper.GetErrorMessage(error));
            }

            // Find the stream indices
            for (int i = 0; i < ofmt_ctx->nb_streams; i++)
            {
                if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    videoindex = i;
                    Console.WriteLine("video.............." + videoindex);
                }
                if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    audioindex = i;
                    Console.WriteLine("audio.............." + audioindex);
                }
            }

            if (videoindex == -1)
            {
                Console.WriteLine("Couldn't find a video stream.");
                return -1;
            }

            if (audioindex == -1)
            {
                Console.WriteLine("Couldn't find an audio stream.");
                return -1;
            }

            #region Initialize video

            // Video stream handling
            if (videoindex > -1)
            {
                // Get the codec context of the video stream
                pCodecCtx_Video = ofmt_ctx->streams[videoindex]->codec;

                // Look up the decoder by the codec id from the codec context
                pCodec_Video = ffmpeg.avcodec_find_decoder(pCodecCtx_Video->codec_id);
                if (pCodec_Video == null)
                {
                    Console.WriteLine("No decoder found");
                    return -1;
                }

                // Open the decoder
                if (ffmpeg.avcodec_open2(pCodecCtx_Video, pCodec_Video, null) < 0)
                {
                    Console.WriteLine("Failed to open the decoder");
                    return -1;
                }
                Console.WriteLine("Find a  video stream.channel=" + videoindex);

                // Print video information (the name fields are C strings, hence PtrToStringAnsi)
                var format = Marshal.PtrToStringAnsi((IntPtr)ofmt_ctx->iformat->name);
                var len = (ofmt_ctx->duration) / 1000000;
                var width = pCodecCtx_Video->width;
                var height = pCodecCtx_Video->height;
                Console.WriteLine("video format:" + format);
                Console.WriteLine("video length:" + len);
                Console.WriteLine("video width&height:width=" + width + " height=" + height);
                Console.WriteLine("video codec name:" + Marshal.PtrToStringAnsi((IntPtr)pCodec_Video->name));

                // AVPacket holds the compressed data (H264), one frame at a time

                // AVFrame holds the decoded pixel data (YUV)
                pFrame_Video = ffmpeg.av_frame_alloc();
                // Frame for the YUV420 output
                pFrameYUV_Video = ffmpeg.av_frame_alloc();
                // Memory is only really allocated once the pixel format and picture size are known
                out_buffer_size_video = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height);
                out_buffer_video = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size_video);
                // Attach the buffer to the YUV frame
                ffmpeg.avpicture_fill((AVPicture*)pFrameYUV_Video, out_buffer_video, AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height);
                // Scaling/conversion parameters: source width/height/format and destination width/height/format
                sws_ctx_video = ffmpeg.sws_getContext(pCodecCtx_Video->width, pCodecCtx_Video->height, AVPixelFormat.AV_PIX_FMT_YUV420P /*pCodecCtx->pix_fmt*/, pCodecCtx_Video->width, pCodecCtx_Video->height, AVPixelFormat.AV_PIX_FMT_YUV420P, ffmpeg.SWS_BICUBIC, null, null, null);
            }
            #endregion

            #region Initialize audio
            // Audio stream handling
            if (audioindex > -1)
            {
                // Get the stream by index, then the decoder context from the stream
                pCodeCtx_Audio = ofmt_ctx->streams[audioindex]->codec;

                // Look up the decoder by the codec id from the context
                pCodec_Audio = ffmpeg.avcodec_find_decoder(pCodeCtx_Audio->codec_id);
                if (pCodec_Audio == null)
                {
                    Console.WriteLine("No decoder found");
                    return -1;
                }
                // Open the decoder
                if (ffmpeg.avcodec_open2(pCodeCtx_Audio, pCodec_Audio, null) < 0)
                {
                    Console.WriteLine("Failed to open the decoder");
                    return -1;
                }
                Console.WriteLine("Find a  audio stream. channel=" + audioindex);

                // Decoded data
                frame_Audio = ffmpeg.av_frame_alloc();

                // Resample everything to a uniform format: 16-bit, 44100 Hz PCM
                swrCtx_Audio = ffmpeg.swr_alloc();
                // Resampling options ------------------------------------------------------start
                // Input sample format
                AVSampleFormat in_sample_fmt = pCodeCtx_Audio->sample_fmt;
                // Output sample format: 16-bit PCM
                out_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
                // Input sample rate
                int in_sample_rate = pCodeCtx_Audio->sample_rate;
                // Output sample rate
                int out_sample_rate = 44100;
                // Input channel layout
                long in_ch_layout = (long)pCodeCtx_Audio->channel_layout;
                // Output channel layout
                int out_ch_layout = ffmpeg.AV_CH_LAYOUT_MONO;

                ffmpeg.swr_alloc_set_opts(swrCtx_Audio, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
                ffmpeg.swr_init(swrCtx_Audio);
                // Resampling options ------------------------------------------------------end
                // Number of output channels
                out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
                // Buffer for the PCM data
                out_buffer_audio = (byte*)ffmpeg.av_malloc(2 * 44100);
            }
            #endregion

            // Allocate the packet buffer
            packet = (AVPacket*)ffmpeg.av_malloc((ulong)sizeof(AVPacket));

            // Keep references to the SDL playback objects
            this.sdlVideo = sdlVideo;
            this.sdlAudio = sdlAudio;

            isInit = true;

            return 0;
        }


        /// <summary>
        /// Read the audio/video streams and play them
        /// </summary>
        public unsafe int ReadAndPlay()
        {
            IsRun = true;
            exit_thread = false;
            pause_thread = false;
            thread = Thread.CurrentThread;
            //int error, frame_count = 0;
            int got_frame, ret;
            //SwsContext* pSwsCtx = null;

            byte* out_audio_buffer = out_buffer_audio;

            try
            {

                while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
                {

                    // Exit the thread
                    if (exit_thread)
                    {
                        break;
                    }
                    // Pause parsing
                    while (pause_thread)
                    {
                        Thread.Sleep(100);
                    }
                    #region Decode video (H264 to YUV) and play it with SDL
                    if (packet->stream_index == videoindex)
                    {
                        // Decode one frame of compressed video data into pixel data
                        ret = ffmpeg.avcodec_decode_video2(pCodecCtx_Video, pFrame_Video, &got_frame, packet);
                        if (ret < 0)
                        {
                            Console.WriteLine("Video decoding error");
                            return -1;
                        }

                        // A complete frame was decoded
                        if (got_frame > 0)
                        {
                            double pts = 0; //ffmpeg.av_frame_get_best_effort_timestamp(pFrameYUV_Video);
                            //VideoState* vs = null;
                            //vs->video_clock = pts;
                            //vs->video_st = ofmt_ctx->streams[videoindex];
                            //pts = synchronize_video(vs, pFrame_Video, pts);
                            //if (queue_picture(is, pFrame, pts) < 0)
                            //{
                            //    break;
                            //}
                            video_frame_count++;
                            // Problematic PTS calculation (kept for reference):
                            //int pts = video_frame_count++ * (pCodecCtx_Video->pkt_timebase.num * 1000 / 25 /* pCodecCtx->pkt_timebase.den*/);

                            Console.WriteLine("Video frame " + video_frame_count);

                            // Convert the AVFrame to YUV420 at the target width/height
                            ffmpeg.sws_scale(sws_ctx_video, pFrame_Video->data, pFrame_Video->linesize, 0, pCodecCtx_Video->height, pFrameYUV_Video->data, pFrameYUV_Video->linesize);

                            Console.WriteLine("Video: pts= " + packet->pts + " dts=" + packet->dts);

                            // Play the YUV data with SDL: either of the two calls below works
                            sdlVideo.SDL_Display(pCodecCtx_Video->width, pCodecCtx_Video->height, (IntPtr)out_buffer_video, out_buffer_size_video, pFrameYUV_Video->linesize[0]);
                            //sdlVideo.SDL_Display(pCodecCtx_Video->width, pCodecCtx_Video->height, (IntPtr)pFrameYUV_Video->data[0], out_buffer_size_video, pFrameYUV_Video->linesize[0]);

                            //DeleyToPlay_Video(packet->pts);
                        }
                    }
                    #endregion

                    #region Decode audio (AAC to PCM) and play it with SDL
                    if (packet->stream_index == audioindex)
                    {
                        // Decode AVPacket -> AVFrame
                        ret = ffmpeg.avcodec_decode_audio4(pCodeCtx_Audio, frame_Audio, &got_frame, packet);
                        if (ret < 0)
                        {
                            Console.WriteLine("Audio decoding failed");
                            return -1;
                        }
                        // A complete frame was decoded
                        if (got_frame > 0)
                        {
                            audio_frame_count++;
                            Console.WriteLine("Audio frame " + audio_frame_count);
                            // Resample the audio
                            ffmpeg.swr_convert(swrCtx_Audio, &out_audio_buffer, 2 * 44100, (byte**)&frame_Audio->data, frame_Audio->nb_samples);

                            // Get the size of the resampled PCM
                            out_buffer_size_audio = ffmpeg.av_samples_get_buffer_size(null, out_channel_nb, frame_Audio->nb_samples, out_sample_fmt, 1);

                            Console.WriteLine("Audio: pts= " + packet->pts + " dts=" + packet->dts);

                            // Hand the PCM to SDL for playback
                            sdlAudio.PlayAudio((IntPtr)out_audio_buffer, out_buffer_size_audio);

                            //DeleyToPlay_Audio(packet->pts);

                        }
                    }
                    #endregion
                    Thread.Sleep(20);
                    // Release the packet
                    ffmpeg.av_free_packet(packet);
                }


            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
            finally
            {
                //if (ofmt_ctx != null)
                //{
                //    ffmpeg.avformat_close_input(&ofmt_ctx); // close the input file
                //}
            }
            IsRun = false;
            return 0;
        }

        /// <summary>
        /// Start the playback thread
        /// </summary>
        /// <param name="fileName"></param>
        /// <param name="sdlVideo"></param>
        /// <param name="sdlAudio"></param>
        public void Start()
        {
            if (!isInit)
            {
                MessageBox.Show("Not initialized");
                return;
            }
            thread = new Thread(() =>
            {
                try
                {
                    ReadAndPlay();
                }
                catch (Exception ex)
                {
                    SQ.Base.ErrorLog.WriteLog4Ex("JT1078CodecForMp4.Run Video", ex);
                }
            });
            thread.IsBackground = true;
            thread.Start();


        }

        /// <summary>
        /// Resume after a pause
        /// </summary>
        public void GoOnPlay()
        {
            pause_thread = false;
            sdlVideo.PlayVideo();
            sdlAudio.PlayAudio();
        }

        /// <summary>
        /// Pause
        /// </summary>
        public void Pause()
        {
            pause_thread = true;
            sdlVideo.PauseVideo();
            sdlAudio.PauseAudio();
        }

        /// <summary>
        /// Stop
        /// </summary>
        public void Stop()
        {
            exit_thread = true;
        }

        long lastPts_Video = 0;
        DateTime lastTS_Video;

        long lastPts_Audio = 0;
        DateTime lastTS_Audio;

        private void DeleyToPlay_Video(long pts)
        {
            if (lastPts_Video > 0 && lastTS_Video != null)
            {
                double delay = (DateTime.Now - lastTS_Video).TotalMilliseconds;
                var i = (int)(pts - lastPts_Video - delay);
                if (i >= 1)
                {
                    Thread.Sleep(i);
                }
            }
            lastTS_Video = DateTime.Now;
            lastPts_Video = pts;
        }

        private void DeleyToPlay_Audio(long pts)
        {
            if (lastPts_Audio > 0 && lastTS_Audio != null)
            {
                double delay = (DateTime.Now - lastTS_Audio).TotalMilliseconds;
                var i = (int)(pts - lastPts_Audio - delay);
                if (i >= 1)
                {
                    Thread.Sleep(i);
                }
            }
            lastTS_Audio = DateTime.Now;
            lastPts_Audio = pts;
        }

        ////# http://dranger.com/ffmpeg/tutorial05.html
        //public struct VideoState
        //{
        //    public double video_clock; // pts of last decoded frame / predicted pts of next decoded frame

        //    public AVStream* video_st;// video stream
        //}

        //public unsafe double synchronize_video(VideoState* vs, AVFrame* src_frame, double pts)
        //{

        //    double frame_delay;

        //    if (pts != 0)
        //    {
        //        /* if we have pts, set video clock to it */
        //        vs->video_clock = pts;
        //    }
        //    else
        //    {
        //        /* if we aren't given a pts, set it to the clock */
        //        pts = vs->video_clock;
        //    }
        //    /* update the video clock */
        //    frame_delay = av_q2d(vs->video_st->codec->time_base);
        //    /* if we are repeating a frame, adjust clock accordingly */
        //    frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
        //    vs->video_clock += frame_delay;
        //    return pts;
        //}

        //struct VideoPicture
        //{
        //    double pts;
        //}
        //int queue_picture(VideoState* vs, AVFrame* pFrame, double pts)
        //{
        //    if (vp->bmp)
        //    {
        //    ... convert picture ...
        //         vp->pts = pts;
        //    ... alert queue ...
        //  }

        //}
    }
Version that solves the audio/video synchronization problem
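
The synchronization approach is plain wall-clock pacing: remember the PTS (in milliseconds) and the wall-clock time of the first video packet, then before rendering each later frame sleep until that frame's PTS offset has elapsed in real time; the audio is simply left to play at the device rate. Since the listing below is truncated before DeleyToPlay, here is a minimal sketch of that pacing step under the same assumptions (firstPts and startTS are the fields set when the first video packet is read in the loop below); it is a reconstruction, not necessarily the author's exact method.

    // Sketch only: sleep until this frame's PTS offset has elapsed on the wall clock.
    private void DeleyToPlay(long ptsMs)
    {
        // Position of this frame in the file, relative to the first video frame
        long target = ptsMs - firstPts;
        // Wall-clock time that has actually elapsed since playback started
        double elapsed = (DateTime.Now - startTS).TotalMilliseconds;
        int wait = (int)(target - elapsed);
        if (wait > 0)
        {
            Thread.Sleep(wait); // hold back the video; audio is not delayed
        }
    }
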
using CV.Media.Utils.Filter;
using CV.Video.Base;
using CV.Video.Base.FFmpeg;
using FFmpeg.AutoGen;
using JX;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using static CvNetVideo.UCVideo;

namespace CvNetVideo.Codec.Video
{

    /// <summary>
    /// MP4 playback (audio and video on the same thread)
    /// </summary>
    public unsafe class JT1078CodecToPlayMp4
    {

        /// <summary>
        /// Indicates whether decoding is currently running
        /// </summary>
        public bool IsRun { get; protected set; }
        /// <summary>
        /// Indicates whether decoding is currently paused
        /// </summary>
        public bool IsPause { get; protected set; }
        /// <summary>
        /// Worker thread
        /// </summary>
        public Thread thread;
        /// <summary>
        /// Exit flag
        /// </summary>
        private bool exit_thread = false;
        /// <summary>
        /// Pause flag
        /// </summary>
        private bool pause_thread = false;
        /// <summary>
        /// Index of the video output stream
        /// </summary>
        private int videoindex = -1;
        /// <summary>
        /// Index of the audio output stream
        /// </summary>
        private int audioindex = -1;
        /// <summary>
        /// Whether Init has been called
        /// </summary>
        private bool isInit = false;


        int error;
        AVFormatContext* ofmt_ctx = null;
        AVPacket* packet;
        AVCodecContext* pCodecCtx_Video;
        AVCodec* pCodec_Video;
        AVFrame* pFrame_Video;
        AVFrame* pFrameYUV_Video;
        SwsContext* sws_ctx_video;
        SDLHelper sdlVideo;
        SDLAudio sdlAudio;

        int out_buffer_size_video;
        byte* out_buffer_video;
        int video_frame_count, audio_frame_count;


        AVCodecContext* pCodeCtx_Audio;
        AVCodec* pCodec_Audio;
        AVFrame* frame_Audio;
        SwrContext* swrCtx_Audio;

        byte* out_buffer_audio;
        int out_buffer_size_audio;
        int out_channel_nb;
        AVSampleFormat out_sample_fmt;

        int contrast;          // contrast
        int brightness;        // brightness
        int contrast_last;     // previous contrast
        int brightness_last;   // previous brightness

        // Contrast/brightness video filter
        private VideoFiltering m_video_filtering = new VideoFiltering();

        /// <summary>
        /// Set the image contrast and brightness
        /// </summary>
        /// <param name="contrast"></param>
        /// <param name="brightness"></param>
        public void SetContrastAndBrightness(int contrast, int brightness)
        {
            this.contrast = contrast;
            this.brightness = brightness;
        }
        /// <summary>
        /// YUV width
        /// </summary>
        public int YuvWidth { get; set; }
        /// <summary>
        /// YUV height
        /// </summary>
        public int YuvHeight { get; set; }

        /// <summary>
        /// Keeps the most recent decoded frames (used to re-show the previous frame)
        /// </summary>
        List<AVVideo> list = new List<AVVideo>();

        /// <summary>
        /// Initialize
        /// </summary>
        /// <param name="fileName"></param>
        /// <param name="sdlVideo"></param>
        /// <param name="sdlAudio"></param>
        /// <returns></returns>
        public int Init(string fileName, SDLHelper sdlVideo, SDLAudio sdlAudio)
        {
            AVFormatContext* ofmt_ctx;

            // Register all codecs
            ffmpeg.avcodec_register_all();

            // Allocate the format context
            ofmt_ctx = ffmpeg.avformat_alloc_context();
            this.ofmt_ctx = ofmt_ctx;

            // Open the media file
            error = ffmpeg.avformat_open_input(&ofmt_ctx, fileName, null, null);
            if (error != 0)
            {
                throw new ApplicationException(FFmpegBinariesHelper.GetErrorMessage(error));
            }

            // Find the stream indices
            for (int i = 0; i < ofmt_ctx->nb_streams; i++)
            {
                if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    videoindex = i;
                    Console.WriteLine("video.............." + videoindex);
                }
                if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    audioindex = i;
                    Console.WriteLine("audio.............." + audioindex);
                }
            }

            if (videoindex == -1)
            {
                Console.WriteLine("Couldn't find a video stream.");
                return -1;
            }

            if (audioindex == -1)
            {
                Console.WriteLine("Couldn't find an audio stream.");
                return -1;
            }

            #region Initialize video

            // Video stream handling
            if (videoindex > -1)
            {
                // Get the codec context of the video stream
                pCodecCtx_Video = ofmt_ctx->streams[videoindex]->codec;

                // Look up the decoder by the codec id from the codec context
                pCodec_Video = ffmpeg.avcodec_find_decoder(pCodecCtx_Video->codec_id);
                if (pCodec_Video == null)
                {
                    Console.WriteLine("No decoder found");
                    return -1;
                }

                // Open the decoder
                if (ffmpeg.avcodec_open2(pCodecCtx_Video, pCodec_Video, null) < 0)
                {
                    Console.WriteLine("Failed to open the decoder");
                    return -1;
                }
                Console.WriteLine("Find a  video stream.channel=" + videoindex);

                // Print video information (the name fields are C strings, hence PtrToStringAnsi)
                var format = Marshal.PtrToStringAnsi((IntPtr)ofmt_ctx->iformat->name);
                var len = (ofmt_ctx->duration) / 1000000;
                var width = pCodecCtx_Video->width;
                var height = pCodecCtx_Video->height;
                Console.WriteLine("video format:" + format);
                Console.WriteLine("video length:" + len);
                Console.WriteLine("video width&height:width=" + width + " height=" + height);
                Console.WriteLine("video codec name:" + Marshal.PtrToStringAnsi((IntPtr)pCodec_Video->name));

                // AVPacket holds the compressed data (H264), one frame at a time

                // AVFrame holds the decoded pixel data (YUV)
                pFrame_Video = ffmpeg.av_frame_alloc();
                // Frame for the YUV420 output
                pFrameYUV_Video = ffmpeg.av_frame_alloc();
                // Memory is only really allocated once the pixel format and picture size are known
                out_buffer_size_video = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height);
                out_buffer_video = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size_video);
                // Attach the buffer to the YUV frame
                ffmpeg.avpicture_fill((AVPicture*)pFrameYUV_Video, out_buffer_video, AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height);
                // Scaling/conversion parameters: source width/height/format and destination width/height/format
                sws_ctx_video = ffmpeg.sws_getContext(pCodecCtx_Video->width, pCodecCtx_Video->height, AVPixelFormat.AV_PIX_FMT_YUV420P /*pCodecCtx->pix_fmt*/, pCodecCtx_Video->width, pCodecCtx_Video->height, AVPixelFormat.AV_PIX_FMT_YUV420P, ffmpeg.SWS_BICUBIC, null, null, null);
            }
            #endregion

            #region Initialize audio
            // Audio stream handling
            if (audioindex > -1)
            {
                // Get the stream by index, then the decoder context from the stream
                pCodeCtx_Audio = ofmt_ctx->streams[audioindex]->codec;

                // Look up the decoder by the codec id from the context
                pCodec_Audio = ffmpeg.avcodec_find_decoder(pCodeCtx_Audio->codec_id);
                if (pCodec_Audio == null)
                {
                    Console.WriteLine("No decoder found");
                    return -1;
                }
                // Open the decoder
                if (ffmpeg.avcodec_open2(pCodeCtx_Audio, pCodec_Audio, null) < 0)
                {
                    Console.WriteLine("Failed to open the decoder");
                    return -1;
                }
                Console.WriteLine("Find a  audio stream. channel=" + audioindex);

                // Decoded data
                frame_Audio = ffmpeg.av_frame_alloc();

                // Resample everything to a uniform format: 16-bit, 8000 Hz PCM
                swrCtx_Audio = ffmpeg.swr_alloc();
                // Resampling options ------------------------------------------------------start
                // Input sample format
                AVSampleFormat in_sample_fmt = pCodeCtx_Audio->sample_fmt;
                // Output sample format: 16-bit PCM
                out_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
                // Input sample rate
                int in_sample_rate = pCodeCtx_Audio->sample_rate;
                // Output sample rate
                int out_sample_rate = 8000;
                // Input channel layout
                long in_ch_layout = (long)pCodeCtx_Audio->channel_layout;
                // Output channel layout
                int out_ch_layout = ffmpeg.AV_CH_LAYOUT_MONO;

                ffmpeg.swr_alloc_set_opts(swrCtx_Audio, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
                ffmpeg.swr_init(swrCtx_Audio);
                // Resampling options ------------------------------------------------------end
                // Number of output channels
                out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
                // Buffer for the PCM data
                out_buffer_audio = (byte*)ffmpeg.av_malloc(2 * 8000);
            }
            #endregion

            // Allocate the packet buffer
            packet = (AVPacket*)ffmpeg.av_malloc((ulong)sizeof(AVPacket));

            // Keep references to the SDL playback objects
            this.sdlVideo = sdlVideo;
            this.sdlAudio = sdlAudio;

            isInit = true;

            return 0;
        }


        /// <summary>
        /// Read the audio/video streams and play them
        /// </summary>
        public unsafe int ReadAndPlay(PlayFinishedDo playFinishedDo)
        {
            IsRun = true;
            exit_thread = false;
            pause_thread = false;
            thread = Thread.CurrentThread;
            //int error, frame_count = 0;
            int got_frame, ret;
            //SwsContext* pSwsCtx = null;

            byte* out_audio_buffer = out_buffer_audio;

            try
            {
                AVStream* video_stream = ofmt_ctx->streams[videoindex];

                while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0&& !exit_thread)
                {
                    // Pause parsing (also used to hold on the last frame)
                    while (pause_thread || isLastFrame)
                    {
                        // Exit the thread
                        if (exit_thread)
                        {
                            break;
                        }
                        Thread.Sleep(10);
                    }
                    // Exit the thread
                    if (exit_thread)
                    {
                        break;
                    }

                    // Record the first video frame's PTS and the wall-clock time it was seen
                    if (firstPts == -1 && packet->stream_index == videoindex)
                    {
                        firstPts = packet->pts * 1000 / (video_stream->time_base.den / video_stream->time_base.num);
                        startTS = DateTime.Now;
                    }
                    // Delay only the video to pace playback; the audio is simply played as it arrives
                    if (packet->stream_index == videoindex)
                    {
                        long pts_1 = packet->pts * 1000 / (video_stream->time_base.den / video_stream->time_base.num);
                        DeleyToPlay(pts_1);
                    }

                    #region Decode video (H264 to YUV) and play it with SDL
                    if (packet->stream_index == videoindex)
                    {
                        // Decode one frame of compressed video data into pixel data
                        ret = ffmpeg.avcodec_decode_video2(pCodecCtx_Video, pFrame_Video, &got_frame, packet);
                        if (ret < 0)
                        {
                            Console.WriteLine("Video decoding error");
                            return -1;
                        }

                        // Contrast/brightness filtering (see JT1078ToYuv) ----------- start
                        int width = pCodecCtx_Video->width;
                        int height = pCodecCtx_Video->height;
                        if (contrast != contrast_last || brightness != brightness_last)
                        {
                            m_video_filtering.Reset(width, height, contrast, brightness);
                            contrast_last = contrast;
                            brightness_last = brightness;
                        }
                        // Contrast/brightness filtering (see JT1078ToYuv) ----------- end

                        // A complete frame was decoded
                        if (got_frame > 0)
                        {
                            video_frame_count++;
                            // Apply the contrast/brightness filter ----------- start
                            AVFrame* frame_filter;
                            ret = m_video_filtering.Filter(pFrame_Video, &frame_filter);
                            // Apply the contrast/brightness filter ----------- end

                            // Convert the AVFrame to YUV420 at the target width/height
                            ffmpeg.sws_scale(sws_ctx_video, frame_filter->data, frame_filter->linesize, 0, pCodecCtx_Video->height, pFrameYUV_Video->data, pFrameYUV_Video->linesize);

                            // Keep the last few frames (at most 10) so the previous frame can be re-shown
                            AVVideo videoFrame = new AVVideo(pCodecCtx_Video->width, pCodecCtx_Video->height, (IntPtr)out_buffer_video, out_buffer_size_video, pFrameYUV_Video->linesize[0]);
                            list.Add(videoFrame);
                            if (list.Count > 10) list.RemoveAt(0);

                            // Play the YUV data with SDL: either of the two calls below works
                            sdlVideo.SDL_Display(pCodecCtx_Video->width, pCodecCtx_Video->height, YuvWidth, YuvHeight, (IntPtr)out_buffer_video, out_buffer_size_video, pFrameYUV_Video->linesize[0]);
                            //sdlVideo.SDL_Display(pCodecCtx_Video->width, pCodecCtx_Video->height, (IntPtr)pFrameYUV_Video->data[0], out_buffer_size_video, pFrameYUV_Video->linesize[0]);

                            // When stepping a single frame, pause again after it is shown
                            if (isNextFrame)
                            {
                                Pause();
                                isNextFrame = false;
                            }

                            // Release the filter frame
                            m_video_filtering.UnrefFrame();
                        }
                    }
                    #endregion

                    #region Decode audio (AAC to PCM) and play it with SDL
                    if (packet->stream_index == audioindex)
                    {
                        // Decode AVPacket -> AVFrame
                        ret = ffmpeg.avcodec_decode_audio4(pCodeCtx_Audio, frame_Audio, &got_frame, packet);
                        if (ret < 0)
                        {
                            Console.WriteLine("Audio decoding failed");