
C#: Playing MP4 audio and video with SDL2

 Updated: December 8, 2020, 14:23:21   Author: boonya  
This article describes how to play the audio and video of an MP4 file in C# using SDL2 together with FFmpeg. It is intended as a practical reference; follow along below.

The key points for playback: the video stream is H.264 and the audio stream is AAC. FFmpeg is used to probe the container and decode both streams for playback.

Processing pipeline: H264 -> YUV for video, AAC -> PCM for audio.
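
Before the full helper classes, the probing step can be sketched on its own. The snippet below is a minimal sketch using FFmpeg.AutoGen with the same calls as the listings that follow (error handling trimmed to the essentials); it only locates the video and audio streams inside the MP4 container.

using System;
using FFmpeg.AutoGen;

public static unsafe class StreamProbe
{
    // Open an MP4 file and report the indices of its video and audio streams.
    public static void Probe(string fileName)
    {
        ffmpeg.avcodec_register_all();                        // register all codecs (pre-4.0 style API)
        AVFormatContext* fmt = ffmpeg.avformat_alloc_context();
        if (ffmpeg.avformat_open_input(&fmt, fileName, null, null) != 0)
        {
            Console.WriteLine("Could not open " + fileName);
            return;
        }

        int videoIndex = -1, audioIndex = -1;
        for (int i = 0; i < fmt->nb_streams; i++)
        {
            AVMediaType type = fmt->streams[i]->codec->codec_type;
            if (type == AVMediaType.AVMEDIA_TYPE_VIDEO) videoIndex = i;   // H264 stream
            if (type == AVMediaType.AVMEDIA_TYPE_AUDIO) audioIndex = i;   // AAC stream
        }

        Console.WriteLine("video index=" + videoIndex + " audio index=" + audioIndex);
        ffmpeg.avformat_close_input(&fmt);                    // close the container again
    }
}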

SDL2 helper classes

using SDL2;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks; 
namespace CvNetVideo
{
 public unsafe class SDLHelper
 {
  private IntPtr screen;
  private IntPtr sdlrenderer;
  private IntPtr sdltexture;
  SDL.SDL_Rect sdlrect;
  SDL.SDL_Event sdlevent;
  bool isInit = false;
  public SDLHelper()
  { 
  }  
  public void SDL_MaximizeWindow()
  {
 
  } 
  public int SDL_Init(int width, int height, IntPtr intPtr)
  {
   lock (this)
   {
    if (!isInit)
    {
     // Initialize SDL with the video, audio and timer subsystems
     if (SDL.SDL_Init(SDL.SDL_INIT_VIDEO | SDL.SDL_INIT_AUDIO | SDL.SDL_INIT_TIMER) < 0)
     {
      Console.WriteLine("Could not initialize SDL - {0}\n", SDL.SDL_GetError());
      return -1;
     }
     isInit = true;
    }
    #region SDL setup
    if (sdltexture != IntPtr.Zero)
    {
     SDL.SDL_DestroyTexture(sdltexture);
    }
    if (sdlrenderer != IntPtr.Zero)
    {
     SDL.SDL_DestroyRenderer(sdlrenderer);
    }
    if (screen != IntPtr.Zero)
    {
     SDL.SDL_DestroyWindow(screen);
     SDL.SDL_RaiseWindow(screen);
     SDL.SDL_RestoreWindow(screen);
    }
    //Create the display window from an existing control handle
    screen = SDL.SDL_CreateWindowFrom(intPtr);
    SDL.SDL_ShowWindow(screen);
 
    SDL.SDL_SetWindowSize(screen, width, height);
    //screen = SDL.SDL_CreateWindow("SDL EVENT TEST", SDL.SDL_WINDOWPOS_UNDEFINED, SDL.SDL_WINDOWPOS_UNDEFINED, width, height, SDL.SDL_WindowFlags.SDL_WINDOW_OPENGL | SDL.SDL_WindowFlags.SDL_WINDOW_RESIZABLE);
    //screen = SDL.SDL_CreateWindow("SDL EVENT TEST", SDL.SDL_WINDOWPOS_UNDEFINED, SDL.SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h, SDL.SDL_WindowFlags.SDL_WINDOW_OPENGL | SDL.SDL_WindowFlags.SDL_WINDOW_RESIZABLE);
    if (screen == IntPtr.Zero)
    {
     Console.WriteLine("Can't creat a window:{0}\n", SDL.SDL_GetError());
     return -1;
    }
 
     //Create the renderer
     sdlrenderer = SDL.SDL_CreateRenderer(screen, -1, SDL.SDL_RendererFlags.SDL_RENDERER_ACCELERATED);
     //Create the texture (IYUV, streaming access)
    sdltexture = SDL.SDL_CreateTexture(sdlrenderer, SDL.SDL_PIXELFORMAT_IYUV, (int)SDL.SDL_TextureAccess.SDL_TEXTUREACCESS_STREAMING, width, height);
    #endregion
 
    return 0;
   }
  } 
  public int SDL_Display(int width, int height, IntPtr pixels, int pixelsSize,
   int pitch)
  {
   lock (this)
   {
     #region SDL video rendering
     //Set the texture rectangle
     sdlrect.x = 0;
     sdlrect.y = 0;
     sdlrect.w = width;
     sdlrect.h = height;
     //SDL.SDL_UpdateTexture(sdltexture, ref sdlrect, pixels, pitch);
     SDL.SDL_UpdateTexture(sdltexture, IntPtr.Zero, pixels, pitch);
     //Clear the renderer and copy the texture to the render target
     SDL.SDL_RenderClear(sdlrenderer);
     //SDL.SDL_Rect srcRect = sdlrect;
     //SDL.SDL_RenderCopy(sdlrenderer, sdltexture, ref srcRect, ref sdlrect);
 
     SDL.SDL_RenderCopy(sdlrenderer, sdltexture, IntPtr.Zero, IntPtr.Zero);
     //Present the rendered frame
     SDL.SDL_RenderPresent(sdlrenderer);
     return 0;
     #endregion
    }
   }
 }
 public unsafe class SDLAudio
 {
  class aa
  {
   public byte[] pcm;
   public int len;
  }
  int lastIndex = 0;
 
  private List<aa> data = new List<aa>();
 
  //private List<byte> data = new List<byte>();
  SDL.SDL_AudioCallback Callback;
  public void PlayAudio(IntPtr pcm, int len)
  {
   lock (this)
   {
    byte[] bts = new byte[len];
    Marshal.Copy(pcm, bts, 0, len);
    data.Add(new aa
    {
     len = len,
     pcm = bts
    });
   }
 
   //SDL.SDL_Delay(10);
  }
  void SDL_AudioCallback(IntPtr userdata, IntPtr stream, int len)
  {
    // SDL 2.0 audio callback: fill 'stream' with up to 'len' bytes of queued PCM
   SDL.SDL_RWFromMem(stream, 0, len);
   //if (audio_len == 0)
   // return;
   //len = (len > audio_len ? audio_len : len);
   if (data.Count == 0)
   {
    for (int i = 0; i < len; i++)
    {
     ((byte*)stream)[i] = 0;
    }
    return;
   }
   for (int i = 0; i < len; i++)
   {
    if (data[0].len > i)
    {
     ((byte*)stream)[i] = data[0].pcm[i];
    }
    else
     ((byte*)stream)[i] = 0;
   }
   data.RemoveAt(0);   
  }
  public int SDL_Init()
  {
   Callback = SDL_AudioCallback;
   #region SDL setup
    // SDL.SDL_Init(SDL.SDL_INIT_VIDEO | SDL.SDL_INIT_AUDIO | SDL.SDL_INIT_TIMER) is already called in SDLHelper
   //if (SDL.SDL_Init(SDL.SDL_INIT_VIDEO | SDL.SDL_INIT_AUDIO | SDL.SDL_INIT_TIMER) < 0)
   //{
   // Console.WriteLine("Could not initialize SDL - {0}\n", SDL.SDL_GetError());
   // return -1;
   //}
 
   #endregion 
   SDL.SDL_AudioSpec wanted_spec = new SDL.SDL_AudioSpec();
   wanted_spec.freq = 8000;
   wanted_spec.format = SDL.AUDIO_S16;
   wanted_spec.channels = 1;
   wanted_spec.silence = 0;
   wanted_spec.samples = 320;
   wanted_spec.callback = Callback; 
 
   if (SDL.SDL_OpenAudio(ref wanted_spec, IntPtr.Zero) < 0)
   {
    Console.WriteLine("can't open audio.");
    return -1;
   }
   //Play 
   SDL.SDL_PauseAudio(0);
   return 0;
  } 
 } 
}

These SDL helper classes provide the basic playback plumbing.
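
A minimal sketch of wiring the two helpers into a WinForms form before playback starts (videoPanel is an assumed Panel control used here only for illustration; it is not part of the original code):

// Create the helpers once, e.g. in the form constructor or Load handler.
SDLHelper sdlVideo = new SDLHelper();
SDLAudio sdlAudio = new SDLAudio();

// Bind the SDL window to the existing control and size it to the video area.
sdlVideo.SDL_Init(videoPanel.Width, videoPanel.Height, videoPanel.Handle);

// Open the audio device (8 kHz, 16-bit, mono, as configured in SDLAudio.SDL_Init).
sdlAudio.SDL_Init();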

C# MP4 file audio/video decoder class

using CV.Video.Base;
using CV.Video.Base.FFmpeg;
using FFmpeg.AutoGen;
using JX;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
 
namespace CvNetVideo.Codec.Video
{
 public unsafe class JT1078CodecForMp4
 {
  
  /// <summary>
  /// 指示當前解碼是否在運行
  /// </summary>
  public bool IsRun { get; protected set; }
  /// <summary>
  /// 視頻線程
  /// </summary>
  private Thread threadVideo;
  /// <summary>
  /// 音頻線程
  /// </summary>
  private Thread threadAudio;
  /// <summary>
  /// 退出控制
  /// </summary>
  private bool exit_thread = false;
  /// <summary>
  /// 暫??刂?
  /// </summary>
  private bool pause_thread = false;
  /// <summary>
  /// 視頻輸出流videoindex
  /// </summary>
  private int videoindex = -1;
  /// <summary>
  /// 音頻輸出流audioindex
  /// </summary>
  private int audioindex = -1;
 
  /// <summary>
  /// 視頻H264轉YUV并使用SDL進行播放
  /// </summary>
  /// <param name="fileName"></param>
  /// <param name="sdlVideo"></param>
  /// <returns></returns>
  public unsafe int RunVideo(string fileName,SDLHelper sdlVideo)
  {
   IsRun = true;
   exit_thread = false;
   pause_thread = false;
   threadVideo = Thread.CurrentThread;
   int error, frame_count = 0;
   int got_picture, ret;
   SwsContext* pSwsCtx = null;
   AVFormatContext* ofmt_ctx = null;
   IntPtr convertedFrameBufferPtr = IntPtr.Zero;
   try
   {
     // Register all codecs
     ffmpeg.avcodec_register_all();
 
     // Allocate the format context
     ofmt_ctx = ffmpeg.avformat_alloc_context();
 
     // Open the media file
     error = ffmpeg.avformat_open_input(&ofmt_ctx, fileName, null, null);
     if (error != 0)
     {
      throw new ApplicationException(FFmpegBinariesHelper.GetErrorMessage(error));
     }
 
     // Find the stream indices
    for (int i = 0; i < ofmt_ctx->nb_streams; i++)
    {
     if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
     {
      videoindex = i;
      Console.WriteLine("video.............."+videoindex);
     }
    }
 
    if (videoindex == -1)
    {
     Console.WriteLine("Couldn't find a video stream.(沒有找到視頻流)");
     return -1;
    }
 
     // Video stream handling
     if (videoindex > -1)
     {
      //Get the codec context of the video stream
      AVCodecContext* pCodecCtx = ofmt_ctx->streams[videoindex]->codec;
 
      //Find the decoder that matches the codec id in the context
      AVCodec* pCodec = ffmpeg.avcodec_find_decoder(pCodecCtx->codec_id);
      if (pCodec == null)
      {
       Console.WriteLine("Decoder not found");
       return -1;
      }
 
      //Open the decoder
      if (ffmpeg.avcodec_open2(pCodecCtx, pCodec, null) < 0)
      {
       Console.WriteLine("Failed to open the decoder");
       return -1;
      }
      Console.WriteLine("Find a video stream.channel=" + videoindex);
 
      //Print video information
      var format = Marshal.PtrToStringAnsi((IntPtr)ofmt_ctx->iformat->name);
      var len = (ofmt_ctx->duration) / 1000000;
      var width = pCodecCtx->width;
      var height = pCodecCtx->height;
      Console.WriteLine("video format:" + format);
      Console.WriteLine("video length:" + len);
      Console.WriteLine("video width&height:width=" + width + " height=" + height);
      Console.WriteLine("video codec name:" + Marshal.PtrToStringAnsi((IntPtr)pCodec->name));
 
      //Prepare for reading
      //AVPacket holds the compressed data (H264), one packet at a time
      //Allocate the packet buffer
      AVPacket* packet = (AVPacket*)ffmpeg.av_malloc((ulong)sizeof(AVPacket));
 
      //AVFrame holds the decoded pixel data (YUV)
      //Allocate the frames
      AVFrame* pFrame = ffmpeg.av_frame_alloc();
      //YUV420 target frame
      AVFrame* pFrameYUV = ffmpeg.av_frame_alloc();
      //Memory is only really allocated once the pixel format and picture size are known
      //Allocate the output buffer
      int out_buffer_size = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
      byte* out_buffer = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size);
      //Bind the buffer to the YUV frame
      ffmpeg.avpicture_fill((AVPicture*)pFrameYUV, out_buffer, AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
 
      //Scaling/conversion parameters: source width/height/format and destination width/height/format
      SwsContext* sws_ctx = ffmpeg.sws_getContext(pCodecCtx->width, pCodecCtx->height, AVPixelFormat.AV_PIX_FMT_YUV420P /*pCodecCtx->pix_fmt*/, pCodecCtx->width, pCodecCtx->height, AVPixelFormat.AV_PIX_FMT_YUV420P, ffmpeg.SWS_BICUBIC, null, null, null);
 
     while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
     {
       // Exit the thread
       if (exit_thread)
       {
        break;
       }
       // Pause decoding
       if (pause_thread)
       {
        while (pause_thread)
        {
         Thread.Sleep(100);
        }
       }
       //Only handle video packets (selected by stream index)
      if (packet->stream_index == videoindex)
      {
        //Decode one compressed video packet into pixel data
        ret = ffmpeg.avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
        if (ret < 0)
        {
         Console.WriteLine("Video decode error");
         return -1;
        }
 
        // A decoded frame is available
        if (got_picture > 0)
        {
         frame_count++;
         Console.WriteLine("Video frame " + frame_count);
 
         //Convert the AVFrame to YUV420 at the target width/height
         ffmpeg.sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
 
         //Play the YUV data with SDL
         var data = out_buffer;
         sdlVideo.SDL_Display(pCodecCtx->width, pCodecCtx->height, (IntPtr)data, out_buffer_size, pFrameYUV->linesize[0]);
        }
       }
 
       //Release the packet
       ffmpeg.av_free_packet(packet);
     } 
    } 
   }
   catch (Exception ex)
   {
    Console.WriteLine(ex);
   }
   finally
   {
    if (&ofmt_ctx != null)
    {
     ffmpeg.avformat_close_input(&ofmt_ctx);//關閉流文件 
    }    
   }
   IsRun = false;
   return 0;
  }
 
  /// <summary>
   /// Decode the AAC audio to PCM and play it with SDL
  /// </summary>
  /// <param name="fileName"></param>
  /// <param name="sdlAudio"></param>
  /// <returns></returns>
  public unsafe int RunAudio(string fileName, SDLAudio sdlAudio)
  {
   IsRun = true;
   exit_thread = false;
   pause_thread = false;
   threadAudio = Thread.CurrentThread;
   int error, frame_count = 0;
   int got_frame, ret;
   AVFormatContext* ofmt_ctx = null;
   SwsContext* pSwsCtx = null;
   IntPtr convertedFrameBufferPtr = IntPtr.Zero;
   try
   {
     // Register all codecs
     ffmpeg.avcodec_register_all();
 
     // Allocate the format context
     ofmt_ctx = ffmpeg.avformat_alloc_context();
 
     // Open the media file
     error = ffmpeg.avformat_open_input(&ofmt_ctx, fileName, null, null);
     if (error != 0)
     {
      throw new ApplicationException(FFmpegBinariesHelper.GetErrorMessage(error));
     }
 
     // Find the stream indices
    for (int i = 0; i < ofmt_ctx->nb_streams; i++)
    {
     if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
     {
      audioindex = i;
      Console.WriteLine("audio.............." + audioindex);
     }
    }
 
    if (audioindex == -1)
    {
     Console.WriteLine("Couldn't find a audio stream.(沒有找到音頻流)");
     return -1;
    }
 
     // Audio stream handling
     if (audioindex > -1)
     {
      //Get the stream by index and its decoder context
      AVCodecContext* pCodeCtx = ofmt_ctx->streams[audioindex]->codec;
 
      //Find the decoder that matches the codec id in the context
      AVCodec* pCodec = ffmpeg.avcodec_find_decoder(pCodeCtx->codec_id);
      if (pCodec == null)
      {
       Console.WriteLine("Decoder not found");
       return -1;
      }
      //Open the decoder
      if (ffmpeg.avcodec_open2(pCodeCtx, pCodec, null) < 0)
      {
       Console.WriteLine("Failed to open the decoder");
       return -1;
      }
      Console.WriteLine("Find an audio stream. channel=" + audioindex);
 
      //Compressed data
      AVPacket* packet = (AVPacket*)ffmpeg.av_malloc((ulong)(sizeof(AVPacket)));
      //Decoded data
      AVFrame* frame = ffmpeg.av_frame_alloc();
 
      //Resample every frame to a common format and rate: 16-bit 44100 Hz PCM
      SwrContext* swrCtx = ffmpeg.swr_alloc();
      //Resampling options -----------------------------------------------------------start
      //Input sample format
      AVSampleFormat in_sample_fmt = pCodeCtx->sample_fmt;
      //Output sample format: 16-bit PCM
      AVSampleFormat out_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
      //Input sample rate
      int in_sample_rate = pCodeCtx->sample_rate;
      //Output sample rate
      int out_sample_rate = 44100;
      //Input channel layout
      long in_ch_layout = (long)pCodeCtx->channel_layout;
      //Output channel layout
      int out_ch_layout = ffmpeg.AV_CH_LAYOUT_MONO;
 
      ffmpeg.swr_alloc_set_opts(swrCtx, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
      ffmpeg.swr_init(swrCtx);
      //Resampling options -----------------------------------------------------------end
      //Number of output channels
      int out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
      //Buffer for the PCM data
      byte* out_buffer = (byte*)ffmpeg.av_malloc(2 * 44100);
      
      //Read the compressed audio data one AVPacket at a time
     while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
     {
       // Exit the thread
       if (exit_thread)
       {
        break;
       }
       // Pause decoding
       if (pause_thread)
       {
        while (pause_thread)
        {
         Thread.Sleep(100);
        }
       }
       if (packet->stream_index == audioindex)
       {
        //Decode AVPacket -> AVFrame
       ret = ffmpeg.avcodec_decode_audio4(pCodeCtx, frame, &got_frame, packet);
       if (ret < 0)
       {
        Console.WriteLine("音頻解碼失敗");
        return -1;
       }
       // 讀取幀數(shù)據(jù)
       if (got_frame>0)
       {
        frame_count++;
        Console.WriteLine("音頻幀數(shù):第 "+ frame_count + " 幀");
        var data_ = frame->data;
        ffmpeg.swr_convert(swrCtx, &out_buffer, 2 * 44100,(byte**)&data_, frame->nb_samples);
        //獲取sample的size
        int out_buffer_size = ffmpeg.av_samples_get_buffer_size(null, out_channel_nb, frame->nb_samples, out_sample_fmt, 1);
        //寫入文件進行測試
        var data=out_buffer;
        sdlAudio.PlayAudio((IntPtr)data, out_buffer_size);
       }
      }
      ffmpeg.av_free_packet(packet);
     } 
    } 
   }
   catch (Exception ex)
   {
    Console.WriteLine(ex);
   }
   finally
   {
    if (&ofmt_ctx != null)
    {
     ffmpeg.avformat_close_input(&ofmt_ctx);//關閉流文件 
    } 
   }
   IsRun = false;
   return 0;
  } 
 
  /// <summary>
   /// Start the playback threads
  /// </summary>
  /// <param name="fileName"></param>
  /// <param name="sdlVideo"></param>
  public void Start(string fileName, SDLHelper sdlVideo,SDLAudio sdlAudio)
  {
    // Video thread
   threadVideo = new Thread(() =>
    {
     try
     {
      RunVideo(fileName, sdlVideo);
     }
     catch (Exception ex)
     {
      SQ.Base.ErrorLog.WriteLog4Ex("JT1078CodecForMp4.Run Video", ex);
     }
    });
   threadVideo.IsBackground = true;
   threadVideo.Start();
 
    // Audio thread
   threadAudio = new Thread(() =>
   {
    try
    {
     RunAudio(fileName, sdlAudio);
    }
    catch (Exception ex)
    {
     SQ.Base.ErrorLog.WriteLog4Ex("JT1078CodecForMp4.Run Audio", ex);
    }
   });
   threadAudio.IsBackground = true;
   threadAudio.Start();
  }
 
  /// <summary>
   /// Resume after pause
  /// </summary>
  public void GoOn()
  {
   pause_thread = false;
 
  }
 
  /// <summary>
   /// Pause
  /// </summary>
  public void Pause()
  {
   pause_thread = true;
  }
 
  /// <summary>
   /// Stop
  /// </summary>
  public void Stop()
  {
   exit_thread = true;
  }
 }
}

Pause, resume and stop are of limited use here, because the file is parsed much faster than real time.

Test code and screenshots

 /// <summary>
  /// 播放
  /// </summary>
  /// <param name="sender"></param>
  /// <param name="e"></param>
  private void btnPlay_Click(object sender, EventArgs e)
  {
   // 音視頻媒體文件路徑
   string fileName = "test.mp4";// 表示${Project_home}/bin/Debug/test.mp4
   // 線程讀取音視頻流
   jt1078CodecForMp4 = new JT1078CodecForMp4();
   jt1078CodecForMp4.Start(fileName,sdlVideo,sdlAudio);
  }

Note: the green tint that appears here is not correct, most likely because the whole texture is updated with SDL_UpdateTexture(..., IntPtr.Zero, ...) even though the texture size does not match the frame being uploaded, so the YUV planes end up misinterpreted. Change how the playback method sets the texture data:

/// <summary>
  /// Play video
  /// </summary>
  /// <param name="width"></param>
  /// <param name="height"></param>
  /// <param name="pixels"></param>
  /// <param name="pixelsSize"></param>
  /// <param name="pitch"></param>
  /// <returns></returns>
  public int SDL_Display(int width, int height, IntPtr pixels, int pixelsSize,
   int pitch)
  {
   lock (this)
   {
    while (isPause)
    {
     SDL.SDL_Delay(20);//wait while paused
    }
 
    #region SDL video rendering
    //Set the texture rectangle
    sdlrect.x = 0;
    sdlrect.y = 0;
    sdlrect.w = width;
    sdlrect.h = height;
    SDL.SDL_UpdateTexture(sdltexture, ref sdlrect, pixels, pitch);
    //SDL.SDL_UpdateTexture(sdltexture, IntPtr.Zero, pixels, pitch);//this line caused the green tint in the playback window
    //Clear the renderer and copy the texture to the render target
    SDL.SDL_RenderClear(sdlrenderer);
    //SDL.SDL_Rect srcRect = sdlrect;
    //SDL.SDL_RenderCopy(sdlrenderer, sdltexture, ref srcRect, ref sdlrect);
 
    SDL.SDL_RenderCopy(sdlrenderer, sdltexture, IntPtr.Zero, IntPtr.Zero);
    //Present the rendered frame
    SDL.SDL_RenderPresent(sdlrenderer);
    //SDL.SDL_Delay(40);
    //SDL.SDL_PollEvent(out sdlevent);
    //switch (sdlevent.type)
    //{
    // case SDL.SDL_EventType.SDL_QUIT:
    //  SDL.SDL_Quit();
    //  return -1;
    // default:
    //  break;
    //}
    return 0;
   } 
 
   //SDL.SDL_RenderClear(sdlrenderer);
   //SDL.SDL_RenderCopy(sdlrenderer, sdltexture, ref srcRect, ref sdlrect);
   //SDL.SDL_RenderPresent(sdlrenderer);
    //Delay 40ms
    //SDL.SDL_Delay(40);
   #endregion 
 
    //#region SDL video rendering (alternative variant; unreachable after the return above)
    //Set the texture rectangle
    sdlrect.x = 0;
    sdlrect.y = 0;
    sdlrect.w = width;
    sdlrect.h = height;
    SDL.SDL_UpdateTexture(sdltexture, ref sdlrect, pixels, pitch);
    //Copy the texture to the render target
    SDL.SDL_Rect srcRect = sdlrect;
    SDL.SDL_RenderCopy(sdlrenderer, sdltexture, ref srcRect, ref sdlrect);
    //Present the rendered frame
   SDL.SDL_RenderPresent(sdlrenderer);
   //SDL.SDL_Delay(40);
   SDL.SDL_PollEvent(out sdlevent);
   switch (sdlevent.type)
   {
    case SDL.SDL_EventType.SDL_QUIT:
     SDL.SDL_Quit();
     return -1;
    default:
     break;
   }
   return 0;
   //#endregion
  }
 }

The key code:

SDL.SDL_UpdateTexture(sdltexture, ref sdlrect, pixels, pitch);

//SDL.SDL_UpdateTexture(sdltexture, IntPtr.Zero, pixels, pitch);//this line caused the green tint in the playback window

Effect after the fix:

Code improvement: play audio and video on a single thread (the core loop is sketched below, followed by the full class):
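
The heart of the single-thread approach, condensed as an outline (not standalone code; it uses the same field names as the class below): one av_read_frame loop dispatches each packet by stream_index to either the video path or the audio path.

// Outline of the single-thread demux/decode loop in ReadAndPlay below.
while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
{
    if (packet->stream_index == videoindex)
    {
        // H264 packet -> YUV frame -> SDL texture
        ret = ffmpeg.avcodec_decode_video2(pCodecCtx_Video, pFrame_Video, &got_frame, packet);
        if (got_frame > 0) { /* sws_scale(...) then sdlVideo.SDL_Display(...) */ }
    }
    else if (packet->stream_index == audioindex)
    {
        // AAC packet -> PCM samples -> SDLAudio queue
        ret = ffmpeg.avcodec_decode_audio4(pCodeCtx_Audio, frame_Audio, &got_frame, packet);
        if (got_frame > 0) { /* swr_convert(...) then sdlAudio.PlayAudio(...) */ }
    }
    ffmpeg.av_free_packet(packet);
}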

/// <summary>
 /// MP4 playback (audio and video decoded on one thread)
 /// </summary>
 public unsafe class JT1078CodecToPlayMp4Two
 {
 
  /// <summary>
  /// 指示當前解碼是否在運行
  /// </summary>
  public bool IsRun { get; protected set; }
  /// <summary>
  /// 當前線程
  /// </summary>
  private Thread thread;
  /// <summary>
  /// 退出控制
  /// </summary>
  private bool exit_thread = false;
  /// <summary>
  /// 暫??刂?
  /// </summary>
  private bool pause_thread = false;
  /// <summary>
  /// 視頻輸出流videoindex
  /// </summary>
  private int videoindex = -1;
  /// <summary>
  /// 音頻輸出流audioindex
  /// </summary>
  private int audioindex = -1; 
  private bool isInit = false; 
 
  int error;
  AVFormatContext* ofmt_ctx = null;
  AVPacket* packet;
  AVCodecContext* pCodecCtx_Video;
  AVCodec* pCodec_Video;
  AVFrame* pFrame_Video;
  AVFrame* pFrameYUV_Video;
  SwsContext* sws_ctx_video;
  SDLHelper sdlVideo;
  SDLAudio sdlAudio;
 
  int out_buffer_size_video;
  byte* out_buffer_video;
  int video_frame_count, audio_frame_count; 
 
  AVCodecContext* pCodeCtx_Audio;
  AVCodec* pCodec_Audio;
  AVFrame* frame_Audio;
  SwrContext* swrCtx_Audio;
 
  byte* out_buffer_audio;
  int out_buffer_size_audio;
  int out_channel_nb;
  AVSampleFormat out_sample_fmt;
 
  /// <summary>
  /// Initialize
  /// </summary>
  /// <param name="fileName"></param>
  /// <param name="sdlVideo"></param>
  /// <param name="sdlAudio"></param>
  /// <returns></returns>
  public int Init(string fileName, SDLHelper sdlVideo, SDLAudio sdlAudio)
  {
   AVFormatContext* ofmt_ctx;
 
   // Register all codecs
   ffmpeg.avcodec_register_all();
 
   // Allocate the format context
   ofmt_ctx = ffmpeg.avformat_alloc_context();
   this.ofmt_ctx = ofmt_ctx;
 
   // Open the media file
   error = ffmpeg.avformat_open_input(&ofmt_ctx, fileName, null, null);
   if (error != 0)
   {
    throw new ApplicationException(FFmpegBinariesHelper.GetErrorMessage(error));
   }
 
   // Find the stream indices
   for (int i = 0; i < ofmt_ctx->nb_streams; i++)
   {
    if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
    {
     videoindex = i;
     Console.WriteLine("video.............." + videoindex);
    }
    if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
    {
     audioindex = i;
     Console.WriteLine("audio.............." + audioindex);
    }
   }
 
   if (videoindex == -1)
   {
    Console.WriteLine("Couldn't find a video stream.(沒有找到視頻流)");
    return -1;
   }
 
   if (audioindex == -1)
   {
    Console.WriteLine("Couldn't find a audio stream.(沒有找到音頻流)");
    return -1;
   }
 
   #region Video initialization
 
   // Video stream handling
   if (videoindex > -1)
   {
    //Get the codec context of the video stream
    pCodecCtx_Video = ofmt_ctx->streams[videoindex]->codec;
 
    //Find the decoder that matches the codec id in the context
    pCodec_Video = ffmpeg.avcodec_find_decoder(pCodecCtx_Video->codec_id);
    if (pCodec_Video == null)
    {
     Console.WriteLine("Decoder not found");
     return -1;
    }
 
    //Open the decoder
    if (ffmpeg.avcodec_open2(pCodecCtx_Video, pCodec_Video, null) < 0)
    {
     Console.WriteLine("Failed to open the decoder");
     return -1;
    }
    Console.WriteLine("Find a video stream.channel=" + videoindex);
 
    //Print video information
    var format = Marshal.PtrToStringAnsi((IntPtr)ofmt_ctx->iformat->name);
    var len = (ofmt_ctx->duration) / 1000000;
    var width = pCodecCtx_Video->width;
    var height = pCodecCtx_Video->height;
    Console.WriteLine("video format:" + format);
    Console.WriteLine("video length:" + len);
    Console.WriteLine("video width&height:width=" + width + " height=" + height);
    Console.WriteLine("video codec name:" + Marshal.PtrToStringAnsi((IntPtr)pCodec_Video->name));
 
    //Prepare for reading
    //AVPacket holds the compressed data (H264), one packet at a time
 
    //AVFrame holds the decoded pixel data (YUV)
    //Allocate the frames
    pFrame_Video = ffmpeg.av_frame_alloc();
    //YUV420 target frame
    pFrameYUV_Video = ffmpeg.av_frame_alloc();
    //Memory is only really allocated once the pixel format and picture size are known
    //Allocate the output buffer
    out_buffer_size_video = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height);
    out_buffer_video = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size_video);
    //Bind the buffer to the YUV frame
    ffmpeg.avpicture_fill((AVPicture*)pFrameYUV_Video, out_buffer_video, AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height);
    //Scaling/conversion parameters: source width/height/format and destination width/height/format
    sws_ctx_video = ffmpeg.sws_getContext(pCodecCtx_Video->width, pCodecCtx_Video->height, AVPixelFormat.AV_PIX_FMT_YUV420P /*pCodecCtx->pix_fmt*/, pCodecCtx_Video->width, pCodecCtx_Video->height, AVPixelFormat.AV_PIX_FMT_YUV420P, ffmpeg.SWS_BICUBIC, null, null, null);
   }
   #endregion
 
   #region Audio initialization
   // Audio stream handling
   if (audioindex > -1)
   {
    //Get the stream by index and its decoder context
    pCodeCtx_Audio = ofmt_ctx->streams[audioindex]->codec;
 
    //Find the decoder that matches the codec id in the context
    pCodec_Audio = ffmpeg.avcodec_find_decoder(pCodeCtx_Audio->codec_id);
    if (pCodec_Audio == null)
    {
     Console.WriteLine("Decoder not found");
     return -1;
    }
    //Open the decoder
    if (ffmpeg.avcodec_open2(pCodeCtx_Audio, pCodec_Audio, null) < 0)
    {
     Console.WriteLine("Failed to open the decoder");
     return -1;
    }
    Console.WriteLine("Find an audio stream. channel=" + audioindex);
 
    //Decoded data
    frame_Audio = ffmpeg.av_frame_alloc();
 
    //Resample every frame to a common format and rate: 16-bit 44100 Hz PCM
    swrCtx_Audio = ffmpeg.swr_alloc();
    //Resampling options -----------------------------------------------------------start
    //Input sample format
    AVSampleFormat in_sample_fmt = pCodeCtx_Audio->sample_fmt;
    //Output sample format: 16-bit PCM
    out_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
    //Input sample rate
    int in_sample_rate = pCodeCtx_Audio->sample_rate;
    //Output sample rate
    int out_sample_rate = 44100;
    //Input channel layout
    long in_ch_layout = (long)pCodeCtx_Audio->channel_layout;
    //Output channel layout
    int out_ch_layout = ffmpeg.AV_CH_LAYOUT_MONO;
 
    ffmpeg.swr_alloc_set_opts(swrCtx_Audio, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
    ffmpeg.swr_init(swrCtx_Audio);
    //Resampling options -----------------------------------------------------------end
    //Number of output channels
    out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
    //Buffer for the PCM data
    out_buffer_audio = (byte*)ffmpeg.av_malloc(2 * 44100);
   }
   #endregion
 
   //Allocate the packet buffer
   packet = (AVPacket*)ffmpeg.av_malloc((ulong)sizeof(AVPacket));
 
   // Store the SDL playback objects
   this.sdlVideo = sdlVideo;
   this.sdlAudio = sdlAudio; 
   isInit = true; 
   return 0;
  } 
 
  /// <summary>
  /// Read the media file and play it
  /// </summary>
  public unsafe int ReadAndPlay()
  {
   IsRun = true;
   exit_thread = false;
   pause_thread = false;
   thread = Thread.CurrentThread;
   //int error, frame_count = 0;
   int got_frame, ret;
   //SwsContext* pSwsCtx = null;
 
   byte* out_audio_buffer = out_buffer_audio;
 
   try
   { 
    while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0)
    {
 
      // Exit the thread
      if (exit_thread)
      {
       break;
      }
      // Pause decoding
      while (pause_thread)
      {
       Thread.Sleep(100);
      }
      #region Decode H264 to YUV and play with SDL
     if (packet->stream_index == videoindex)
     {
       //Decode one compressed video packet into pixel data
       ret = ffmpeg.avcodec_decode_video2(pCodecCtx_Video, pFrame_Video, &got_frame, packet);
       if (ret < 0)
       {
        Console.WriteLine("Video decode error");
        return -1;
       }
 
       // A decoded frame is available
       if (got_frame > 0)
       {
        double pts = 0; //ffmpeg.av_frame_get_best_effort_timestamp(pFrameYUV_Video);
        //VideoState* vs = null;
        //vs->video_clock = pts;
        //vs->video_st = ofmt_ctx->streams[videoindex];
        //pts = synchronize_video(vs, pFrame_Video, pts);
        //if (queue_picture(is, pFrame, pts) < 0)
        //{
        // break;
        //}
        video_frame_count++;
        //(problematic PTS calculation)
 
        //int pts = video_frame_count++ * (pCodecCtx_Video->pkt_timebase.num * 1000 / 25 /* pCodecCtx->pkt_timebase.den*/);
 
        Console.WriteLine("Video frame " + video_frame_count);
 
        //Convert the AVFrame to YUV420 at the target width/height
        ffmpeg.sws_scale(sws_ctx_video, pFrame_Video->data, pFrame_Video->linesize, 0, pCodecCtx_Video->height, pFrameYUV_Video->data, pFrameYUV_Video->linesize);
 
        Console.WriteLine("Video: pts= " + packet->pts + " dts=" + packet->dts);
 
        // Play the YUV data with SDL: either of the two calls below works
       sdlVideo.SDL_Display(pCodecCtx_Video->width, pCodecCtx_Video->height, (IntPtr)out_buffer_video, out_buffer_size_video, pFrameYUV_Video->linesize[0]);
       //sdlVideo.SDL_Display(pCodecCtx_Video->width, pCodecCtx_Video->height, (IntPtr)pFrameYUV_Video->data[0], out_buffer_size_video, pFrameYUV_Video->linesize[0]);
 
       //DeleyToPlay_Video(packet->pts);
      }
     }
     #endregion
 
      #region Decode AAC to PCM and play with SDL
      if (packet->stream_index == audioindex)
      {
       //Decode AVPacket -> AVFrame
       ret = ffmpeg.avcodec_decode_audio4(pCodeCtx_Audio, frame_Audio, &got_frame, packet);
       if (ret < 0)
       {
        Console.WriteLine("Audio decode error");
        return -1;
       }
       // A decoded frame is available
       if (got_frame > 0)
       {
        audio_frame_count++;
        Console.WriteLine("Audio frame " + audio_frame_count);
        // Resample the audio
        ffmpeg.swr_convert(swrCtx_Audio, &out_audio_buffer, 2 * 44100, (byte**)&frame_Audio->data, frame_Audio->nb_samples);
 
        // Size of the converted samples
        out_buffer_size_audio = ffmpeg.av_samples_get_buffer_size(null, out_channel_nb, frame_Audio->nb_samples, out_sample_fmt, 1);
 
        Console.WriteLine("Audio: pts= " + packet->pts + " dts=" + packet->dts);
 
        // Play the PCM with SDL
        sdlAudio.PlayAudio((IntPtr)out_audio_buffer, out_buffer_size_audio);
 
       //DeleyToPlay_Audio(packet->pts);
 
      }
     }
     #endregion
     Thread.Sleep(20);
      //Release the packet
     ffmpeg.av_free_packet(packet);
    } 
 
   }
   catch (Exception ex)
   {
    Console.WriteLine(ex);
   }
   finally
   {
    //if (&ofmt_ctx != null)
    //{
     // ffmpeg.avformat_close_input(&ofmt_ctx);//close the input file 
    //}
 
   }
   IsRun = false;
   return 0;
  }
 
  /// <summary>
  /// Start the playback thread
  /// </summary>
  /// <param name="fileName"></param>
  /// <param name="sdlVideo"></param>
  /// <param name="sdlAudio"></param>
  public void Start()
  {
   if (!isInit)
   {
    MessageBox.Show("沒有初始化");
   }
   thread = new Thread(() =>
   {
    try
    {
     ReadAndPlay();
    }
    catch (Exception ex)
    {
     SQ.Base.ErrorLog.WriteLog4Ex("JT1078CodecForMp4.Run Video", ex);
    }
   });
   thread.IsBackground = true;
   thread.Start(); 
  }
 
  /// <summary>
  /// Resume after pause
  /// </summary>
  public void GoOnPlay()
  {
   pause_thread = false;
   sdlVideo.PlayVideo();
   sdlAudio.PlayAudio();
  }
 
  /// <summary>
  /// Pause
  /// </summary>
  public void Pause()
  {
   pause_thread = true;
   sdlVideo.PauseVideo();
   sdlAudio.PauseAudio();
  }
 
  /// <summary>
  /// Stop
  /// </summary>
  public void Stop()
  {
   exit_thread = true;
  }
 
  long lastPts_Video = 0;
  DateTime lastTS_Video;
 
  long lastPts_Audio = 0;
  DateTime lastTS_Audio;
 
  private void DeleyToPlay_Video(long pts)
  {
   if (lastPts_Video > 0 && lastTS_Video != null)
   {
    double delay = (DateTime.Now - lastTS_Video).TotalMilliseconds;
    var i = (int)(pts - lastPts_Video - delay);
    if (i >= 1)
    {
     Thread.Sleep(i);
    }
   }
   lastTS_Video = DateTime.Now;
   lastPts_Video = pts;
  }
 
  private void DeleyToPlay_Audio(long pts)
  {
   if (lastPts_Audio > 0 && lastTS_Audio != null)
   {
    double delay = (DateTime.Now - lastTS_Audio).TotalMilliseconds;
    var i = (int)(pts - lastPts_Audio - delay);
    if (i >= 1)
    {
     Thread.Sleep(i);
    }
   }
   lastTS_Audio = DateTime.Now;
   lastPts_Audio = pts;
  }
 
  // http://dranger.com/ffmpeg/tutorial05.html
  //public struct VideoState
  //{
  // public double video_clock; // pts of last decoded frame / predicted pts of next decoded frame
 
  // public AVStream* video_st;// video stream
  //}
 
  //public unsafe double synchronize_video(VideoState* vs, AVFrame* src_frame, double pts)
  //{
 
  // double frame_delay;
 
  // if (pts != 0)
  // {
  //  /* if we have pts, set video clock to it */
  //  vs->video_clock = pts;
  // }
  // else
  // {
  //  /* if we aren't given a pts, set it to the clock */
  //  pts = vs->video_clock;
  // }
  // /* update the video clock */
  // frame_delay = av_q2d(vs->video_st->codec->time_base);
  // /* if we are repeating a frame, adjust clock accordingly */
  // frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
  // vs->video_clock += frame_delay;
  // return pts;
  //}
 
  //struct VideoPicture
  //{
  // double pts;
  //}
  //int queue_picture(VideoState* vs, AVFrame* pFrame, double pts)
  //{
  // if (vp->bmp)
  // {
  // ... convert picture ...
  //   vp->pts = pts;
  // ... alert queue ...
  // }
 
  //}
 }
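
A minimal way to drive this class (a sketch only; sdlVideo and sdlAudio are assumed to be the helpers initialized as shown earlier):

// Hypothetical call site for the single-thread player.
var player = new JT1078CodecToPlayMp4Two();
if (player.Init("test.mp4", sdlVideo, sdlAudio) == 0)
{
    player.Start();     // spawns the background read/decode/play thread
}
// later, from the UI:
player.Pause();
player.GoOnPlay();
player.Stop();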

Version that fixes audio/video synchronization
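
The synchronization idea in this version: remember the pts of the first video packet together with the wall-clock time at which it was read; before rendering each subsequent video frame, compare how far the stream pts has advanced against how much real time has elapsed, and sleep roughly the difference (audio is left to play at the device's natural rate). A condensed sketch of that pacing logic follows; the full DeleyToPlay method in the listing below instead picks a fixed delay bucket and scales it for fast/slow playback.

// Pacing sketch: pts of the current video packet vs. elapsed wall-clock time.
long ptsMs = packet->pts * 1000 / (video_stream->time_base.den / video_stream->time_base.num);
if (firstPts == -1) { firstPts = ptsMs; startTS = DateTime.Now; }      // remember the first frame
double elapsedMs = (DateTime.Now - startTS).TotalMilliseconds;
int ahead = (int)(ptsMs - firstPts - elapsedMs);                       // how far ahead of real time we are
if (ahead > 0)
    Thread.Sleep(Math.Min(ahead, 100));                                // cap the sleep so pause/stop stay responsive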

using CV.Media.Utils.Filter;
using CV.Video.Base;
using CV.Video.Base.FFmpeg;
using FFmpeg.AutoGen;
using JX;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using static CvNetVideo.UCVideo;
 
namespace CvNetVideo.Codec.Video
{ 
 /// <summary>
 /// MP4 playback (audio and video decoded on one thread)
 /// </summary>
 public unsafe class JT1078CodecToPlayMp4
 { 
  /// <summary>
  /// 指示當前解碼是否在運行
  /// </summary>
  public bool IsRun { get; protected set; }
  /// <summary>
  /// 指示當前解碼是否在暫停
  /// </summary>
  public bool IsPause { get; protected set; }
  /// <summary>
  /// 當前線程
  /// </summary>
  public Thread thread;
  /// <summary>
  /// 退出控制
  /// </summary>
  private bool exit_thread = false;
  /// <summary>
  /// 暫??刂?
  /// </summary>
  private bool pause_thread = false;
  /// <summary>
  /// 視頻輸出流videoindex
  /// </summary>
  private int videoindex = -1;
  /// <summary>
  /// 音頻輸出流audioindex
  /// </summary>
  private int audioindex = -1;
  /// <summary>
  /// 是否初始化
  /// </summary>
  private bool isInit = false; 
 
  int error;
  AVFormatContext* ofmt_ctx = null;
  AVPacket* packet;
  AVCodecContext* pCodecCtx_Video;
  AVCodec* pCodec_Video;
  AVFrame* pFrame_Video;
  AVFrame* pFrameYUV_Video;
  SwsContext* sws_ctx_video;
  SDLHelper sdlVideo;
  SDLAudio sdlAudio;
 
  int out_buffer_size_video;
  byte* out_buffer_video;
  int video_frame_count, audio_frame_count; 
 
  AVCodecContext* pCodeCtx_Audio;
  AVCodec* pCodec_Audio;
  AVFrame* frame_Audio;
  SwrContext* swrCtx_Audio;
 
  byte* out_buffer_audio;
  int out_buffer_size_audio;
  int out_channel_nb;
  AVSampleFormat out_sample_fmt;
 
  int contrast;// contrast
  int brightness;// brightness
  int contrast_last;// last applied contrast
  int brightness_last;// last applied brightness
 
  //Contrast/brightness filter
  private VideoFiltering m_video_filtering = new VideoFiltering();
 
  /// <summary>
  /// Set the image contrast and brightness
  /// </summary>
  /// <param name="contrast"></param>
  /// <param name="brightness"></param>
  /// <returns></returns>
  public void SetContrastAndBrightness(int contrast, int brightness)
  {
   this.contrast = contrast;
   this.brightness = brightness;
  }
  /// <summary>
  /// YUV width
  /// </summary>
  public int YuvWidth { get; set; }
  /// <summary>
  /// YUV height
  /// </summary>
  public int YuvHeight { get; set; }
 
  /// <summary>
  /// Keeps recent frames (used for stepping back one frame)
  /// </summary>
  List<AVVideo> list = new List<AVVideo>();
 
  /// <summary>
  /// Initialize
  /// </summary>
  /// <param name="fileName"></param>
  /// <param name="sdlVideo"></param>
  /// <param name="sdlAudio"></param>
  /// <returns></returns>
  public int Init(string fileName, SDLHelper sdlVideo, SDLAudio sdlAudio)
  {
   AVFormatContext* ofmt_ctx;
 
   // Register all codecs
   ffmpeg.avcodec_register_all();
 
   // Allocate the format context
   ofmt_ctx = ffmpeg.avformat_alloc_context();
   this.ofmt_ctx = ofmt_ctx;
 
   // Open the media file
   error = ffmpeg.avformat_open_input(&ofmt_ctx, fileName, null, null);
   if (error != 0)
   {
    throw new ApplicationException(FFmpegBinariesHelper.GetErrorMessage(error));
   }
 
   // Find the stream indices
   for (int i = 0; i < ofmt_ctx->nb_streams; i++)
   {
    if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
    {
     videoindex = i;
     Console.WriteLine("video.............." + videoindex);
    }
    if (ofmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
    {
     audioindex = i;
     Console.WriteLine("audio.............." + audioindex);
    }
   }
 
   if (videoindex == -1)
   {
    Console.WriteLine("Couldn't find a video stream.(沒有找到視頻流)");
    return -1;
   }
 
   if (audioindex == -1)
   {
    Console.WriteLine("Couldn't find a audio stream.(沒有找到音頻流)");
    return -1;
   }
 
   #region Video initialization
 
   // Video stream handling
   if (videoindex > -1)
   {
    //Get the codec context of the video stream
    pCodecCtx_Video = ofmt_ctx->streams[videoindex]->codec;
 
    //Find the decoder that matches the codec id in the context
    pCodec_Video = ffmpeg.avcodec_find_decoder(pCodecCtx_Video->codec_id);
    if (pCodec_Video == null)
    {
     Console.WriteLine("Decoder not found");
     return -1;
    }
 
    //Open the decoder
    if (ffmpeg.avcodec_open2(pCodecCtx_Video, pCodec_Video, null) < 0)
    {
     Console.WriteLine("Failed to open the decoder");
     return -1;
    }
    Console.WriteLine("Find a video stream.channel=" + videoindex);
 
    //Print video information
    var format = Marshal.PtrToStringAnsi((IntPtr)ofmt_ctx->iformat->name);
    var len = (ofmt_ctx->duration) / 1000000;
    var width = pCodecCtx_Video->width;
    var height = pCodecCtx_Video->height;
    Console.WriteLine("video format:" + format);
    Console.WriteLine("video length:" + len);
    Console.WriteLine("video width&height:width=" + width + " height=" + height);
    Console.WriteLine("video codec name:" + Marshal.PtrToStringAnsi((IntPtr)pCodec_Video->name));
 
    //Prepare for reading
    //AVPacket holds the compressed data (H264), one packet at a time
 
    //AVFrame holds the decoded pixel data (YUV)
    //Allocate the frames
    pFrame_Video = ffmpeg.av_frame_alloc();
    //YUV420 target frame
    pFrameYUV_Video = ffmpeg.av_frame_alloc();
    //Memory is only really allocated once the pixel format and picture size are known
    //Allocate the output buffer
    out_buffer_size_video = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height);
    out_buffer_video = (byte*)ffmpeg.av_malloc((ulong)out_buffer_size_video);
    //Bind the buffer to the YUV frame
    ffmpeg.avpicture_fill((AVPicture*)pFrameYUV_Video, out_buffer_video, AVPixelFormat.AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height);
    //Scaling/conversion parameters: source width/height/format and destination width/height/format
    sws_ctx_video = ffmpeg.sws_getContext(pCodecCtx_Video->width, pCodecCtx_Video->height, AVPixelFormat.AV_PIX_FMT_YUV420P /*pCodecCtx->pix_fmt*/, pCodecCtx_Video->width, pCodecCtx_Video->height, AVPixelFormat.AV_PIX_FMT_YUV420P, ffmpeg.SWS_BICUBIC, null, null, null);
   }
   #endregion
 
   #region Audio initialization
   // Audio stream handling
   if (audioindex > -1)
   {
    //Get the stream by index and its decoder context
    pCodeCtx_Audio = ofmt_ctx->streams[audioindex]->codec;
 
    //Find the decoder that matches the codec id in the context
    pCodec_Audio = ffmpeg.avcodec_find_decoder(pCodeCtx_Audio->codec_id);
    if (pCodec_Audio == null)
    {
     Console.WriteLine("Decoder not found");
     return -1;
    }
    //Open the decoder
    if (ffmpeg.avcodec_open2(pCodeCtx_Audio, pCodec_Audio, null) < 0)
    {
     Console.WriteLine("Failed to open the decoder");
     return -1;
    }
    Console.WriteLine("Find an audio stream. channel=" + audioindex);
 
    //Decoded data
    frame_Audio = ffmpeg.av_frame_alloc();
 
    //Resample every frame to a common format and rate: 16-bit 8000 Hz PCM
    swrCtx_Audio = ffmpeg.swr_alloc();
    //Resampling options -----------------------------------------------------------start
    //Input sample format
    AVSampleFormat in_sample_fmt = pCodeCtx_Audio->sample_fmt;
    //Output sample format: 16-bit PCM
    out_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
    //Input sample rate
    int in_sample_rate = pCodeCtx_Audio->sample_rate;
    //Output sample rate
    int out_sample_rate = 8000;
    //Input channel layout
    long in_ch_layout = (long)pCodeCtx_Audio->channel_layout;
    //Output channel layout
    int out_ch_layout = ffmpeg.AV_CH_LAYOUT_MONO;
 
    ffmpeg.swr_alloc_set_opts(swrCtx_Audio, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
    ffmpeg.swr_init(swrCtx_Audio);
    //Resampling options -----------------------------------------------------------end
    //Number of output channels
    out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
    //Buffer for the PCM data
    out_buffer_audio = (byte*)ffmpeg.av_malloc(2 * 8000);
   }
   #endregion
 
   //Allocate the packet buffer
   packet = (AVPacket*)ffmpeg.av_malloc((ulong)sizeof(AVPacket));
 
   // Store the SDL playback objects
   this.sdlVideo = sdlVideo;
   this.sdlAudio = sdlAudio; 
   isInit = true; 
   return 0;
  } 
 
  /// <summary>
  /// Read the media file and play it
  /// </summary>
  public unsafe int ReadAndPlay(PlayFinishedDo playFinishedDo)
  {
   IsRun = true;
   exit_thread = false;
   pause_thread = false;
   thread = Thread.CurrentThread;
   //int error, frame_count = 0;
   int got_frame, ret;
   //SwsContext* pSwsCtx = null;
 
   byte* out_audio_buffer = out_buffer_audio;
 
   try
   {
    AVStream* video_stream = ofmt_ctx->streams[videoindex];
 
    while (ffmpeg.av_read_frame(ofmt_ctx, packet) >= 0&& !exit_thread)
    {
     // Pause decoding
     while (pause_thread || isLastFrame)
     {
      // Exit the thread
      if (exit_thread)
      {
       break;
      }
      Thread.Sleep(10);
     }
     // Exit the thread
     if (exit_thread)
     {
      break;
     }
 
     // Record the first video pts and the wall-clock time at which it was read
     if (firstPts == -1 && packet->stream_index == videoindex)
     {
      firstPts = packet->pts * 1000 / (video_stream->time_base.den / video_stream->time_base.num);
      startTS = DateTime.Now;
     }
     // Delay only the video; audio plays at its natural device rate
     if (packet->stream_index == videoindex)
     {
      long pts_1 = packet->pts * 1000 / (video_stream->time_base.den / video_stream->time_base.num);
      DeleyToPlay(pts_1);
     }
 
     #region Decode H264 to YUV and play with SDL
     if (packet->stream_index == videoindex)
     {
      //Decode one compressed video packet into pixel data
      ret = ffmpeg.avcodec_decode_video2(pCodecCtx_Video, pFrame_Video, &got_frame, packet);
      if (ret < 0)
      {
       Console.WriteLine("Video decode error");
       return -1;
      }
 
      //Filtering (brightness/contrast), cf. JT1078ToYuv ----------- start
      int width = pCodecCtx_Video->width;
      int height = pCodecCtx_Video->height;
      if (contrast != contrast_last || brightness != brightness_last)
      {
       m_video_filtering.Reset(width, height, contrast, brightness);
       contrast_last = contrast;
       brightness_last = brightness;
      }
 
      //Filtering (brightness/contrast), cf. JT1078ToYuv ----------- end
 
      // A decoded frame is available
      if (got_frame > 0)
      {
       video_frame_count++;
       //>>>>Filtering (brightness/contrast), cf. JT1078ToYuv ----------- start
       AVFrame* frame_filter;
       ret = m_video_filtering.Filter(pFrame_Video, &frame_filter);
       //>>>>Filtering (brightness/contrast), cf. JT1078ToYuv ----------- end
 
 
       //Convert the AVFrame to YUV420 at the target width/height
       ffmpeg.sws_scale(sws_ctx_video, frame_filter->data, frame_filter->linesize, 0, pCodecCtx_Video->height, pFrameYUV_Video->data, pFrameYUV_Video->linesize);
 
       // Keep the last 10 frames (for stepping back)
       AVVideo videoFrame = new AVVideo(pCodecCtx_Video->width, pCodecCtx_Video->height, (IntPtr)out_buffer_video, out_buffer_size_video, pFrameYUV_Video->linesize[0]);
       list.Add(videoFrame);
       if (list.Count > 10) list.RemoveAt(0);
 
       // Play the YUV data with SDL: either of the two calls below works
       sdlVideo.SDL_Display(pCodecCtx_Video->width, pCodecCtx_Video->height,YuvWidth, YuvHeight, (IntPtr)out_buffer_video, out_buffer_size_video, pFrameYUV_Video->linesize[0]);
       //sdlVideo.SDL_Display(pCodecCtx_Video->width, pCodecCtx_Video->height, (IntPtr)pFrameYUV_Video->data[0], out_buffer_size_video, pFrameYUV_Video->linesize[0]);
 
       // When stepping frame by frame, pause again after this frame has been shown
       if (isNextFrame)
       {
        Pause();
        isNextFrame = false;
       }
 
       // Release the filtered frame
       m_video_filtering.UnrefFrame();
      }
     }
     #endregion
 
     #region Decode AAC to PCM and play with SDL
     if (packet->stream_index == audioindex)
     {
      //Decode AVPacket -> AVFrame
      ret = ffmpeg.avcodec_decode_audio4(pCodeCtx_Audio, frame_Audio, &got_frame, packet);
      if (ret < 0)
      {
       Console.WriteLine("Audio decode error");
       return -1;
      }
      // A decoded frame is available
      if (got_frame > 0)
      {
       audio_frame_count++;
    
       // Resample the audio
       ffmpeg.swr_convert(swrCtx_Audio, &out_audio_buffer, 2 * 8000, (byte**)&frame_Audio->data, frame_Audio->nb_samples);
 
       // Size of the converted samples
       out_buffer_size_audio = ffmpeg.av_samples_get_buffer_size(null, out_channel_nb, frame_Audio->nb_samples, out_sample_fmt, 1);
 
       // Play the PCM with SDL
       sdlAudio.PlayAudio((IntPtr)out_audio_buffer, out_buffer_size_audio); 
      }
     }
     #endregion
     
     //Release the packet
     ffmpeg.av_free_packet(packet);
 
     Thread.Sleep(10);
    } 
 
   }
   catch (Exception ex)
   {
    Console.WriteLine(ex);
   }
   finally
   {
    // Free the format context
    ffmpeg.avformat_free_context(ofmt_ctx);
    // Notify the caller that playback has finished (e.g. to update the context menu)
    playFinishedDo.Invoke();
   }
   IsRun = false;
   IsPause = false;
   return 0;
  }
 
  bool isLastFrame = false;
  bool isNextFrame = false;
  bool playFastly = false;
  bool playSlowly = false;
  int play_speed = 1;
  long firstPts = -1;
  DateTime startTS;
  /// <summary>
  /// 控制快慢
  /// </summary>
  /// <param name="pts"></param>
  /// <param name="speed"></param>
  private void DeleyToPlay(long pts)
  {
   int delayTime = 0;
   try
   {
    // 計算延時
    double delay = (DateTime.Now - startTS).TotalMilliseconds;
    var i = (int)(pts - firstPts - delay);
    if (i >= 100)
    {
     delayTime = 40;
     delayTime = ControlFastOrSlow(delayTime);
    }
    else if (i >= 300)
    {
     delayTime = 60;
     delayTime = ControlFastOrSlow(delayTime);
    }
    else if (i >= 500)
    {
     delayTime = 100;
     delayTime = ControlFastOrSlow(delayTime);
    }
   }
   catch
   {
    Console.WriteLine("Counting delay time error ");
   }
   finally
   {
    Console.WriteLine("Counting delay time = " + delayTime+ " play_speed="+ play_speed);
    if (delayTime > 0)
     Thread.Sleep(delayTime);
		 } 
  }
 
  /// <summary>
  /// Scale the delay for fast/slow playback
  /// </summary>
  /// <param name="delayTime"></param>
  private int ControlFastOrSlow(int delayTime)
  {
   if (playFastly)
   {
    // fast playback
    delayTime /= play_speed;
   }
   else if (playSlowly)
   {
    // slow playback
    delayTime *= play_speed;
   }
   return delayTime;
  } 
  /// <summary>
  /// Start the playback thread
  /// </summary>
  /// <param name="fileName"></param>
  /// <param name="sdlVideo"></param>
  /// <param name="sdlAudio"></param>
  public void Start(PlayFinishedDo playFinishedDo)
  {
   if (!isInit)
   {
    MessageBox.Show("沒有初始化");
   }
   thread = new Thread(() =>
   {
    try
    {
     ReadAndPlay(playFinishedDo);
    }
    catch (Exception ex)
    {
     SQ.Base.ErrorLog.WriteLog4Ex("JT1078CodecForMp4.Run Video", ex);
    }
   });
   thread.IsBackground = true;
   thread.Start(); 
  }
 
  /// <summary>
  /// 暫停繼續(xù)
  /// </summary>
  public void GoOnPlay()
  {
   // 重置第一幀pts,處理暫停后音視頻不同步
   firstPts = -1;
   // 繼續(xù)的相關操作和變量修改
   pause_thread = false;
   IsPause = pause_thread;
   sdlVideo.PlayVideo();
   sdlAudio.PlayAudio();
  }
 
  /// <summary>
  /// 暫停
  /// </summary>
  public void Pause()
  {
   // 暫停的相關操作和變量修改
   pause_thread = true;
   IsPause = pause_thread;
   sdlVideo.PauseVideo();
   sdlAudio.PauseAudio();
  }
 
  /// <summary>
  /// Stop
  /// </summary>
  public void Stop()
  {
   exit_thread = true;
   if (thread != null && thread.IsAlive)
   {
    thread.Abort();
    thread.Join();
    thread = null;
   }
  }
 
  /// <summary>
  /// Fast playback
  /// </summary>
  public void PlayFast()
  {
   if (pause_thread)
   {
    // make sure playback is running
    GoOnPlay();
   }
   if (playSlowly)
   {
    play_speed = 1;
    playSlowly = false;
   }
   else
   {
    play_speed++;
   }
   playFastly = true; 
  } 
 
  /// <summary>
  /// Slow playback
  /// </summary>
  public void PlaySlow()
  {
   if (pause_thread)
   {
    // make sure playback is running
    GoOnPlay();
   }
   if (playFastly)
   {
    play_speed = 1;
    playFastly = false;
   }
   else
   {
    play_speed++;
   }
   playSlowly = true; 
  }
 
  /// <summary>
  /// 上一幀
  /// </summary>
  public void PlayLastFrame()
  {
   // 修改上一幀標志
   isLastFrame = true;
   // 每點擊一次向前播一幀
   if (list.Count>0)
   {
    Console.WriteLine("剩余播放幀:"+ list.Count);
    // 激活播放
    GoOnPlay();
    AVVideo lastFrame = list.Last();
    // 播放上一幀圖像
    sdlVideo.SDL_Display(lastFrame.width, lastFrame.height, lastFrame.pixels, lastFrame.pixelsSize, lastFrame.pitch);
    // 修改上一幀標志
    isLastFrame = false;
    // 移除已看過的幀
    list.Remove(lastFrame);
    Thread.Sleep(10);
    Pause();
   }   
  }
 
  /// <summary>
  /// 下一幀
  /// </summary>
  public void PlayNextFrame()
  {
   // 暫停以區(qū)分幀
   Pause();
   // 播放以完成下一幀圖像顯示或聲音播放
   GoOnPlay();
   // 下一幀播放完成暫停標志
   isNextFrame = true;
  }
 }
 
 class Media
 {
  /// <summary>
  /// 0:video,1:audio
  /// </summary>
  public int type { get; set; }
 
  /// <summary>
  /// pts value
  /// </summary>
  public long pts { get; set; }
 }
 
 class AVVideo : Media
 {
  public int width { get; set; }
  public int height { get; set; }
  public IntPtr pixels { get; set; }
  public int pixelsSize { get; set; }
  public int pitch { get; set; }
 
  public AVVideo(int width, int height, IntPtr pixels, int pixelsSize, int pitch)
  {
   this.width = width;
   this.height = height;
   this.pixels = pixels;
   this.pixelsSize = pixelsSize;
   this.pitch = pitch;
  }
 }
 class AVAudio : Media
 {
  public IntPtr pcm { get; set; }
  public int len { get; set; }
 
  public AVAudio(IntPtr pcm, int len)
  {
   this.pcm = pcm;
   this.len = len;
  }
 }
}
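
A sketch of driving the synchronized player; PlayFinishedDo comes from UCVideo and is invoked without arguments when playback ends, so it is assumed here to be compatible with a parameterless lambda:

// Hypothetical call site for the synchronized player.
var player = new JT1078CodecToPlayMp4();
if (player.Init("test.mp4", sdlVideo, sdlAudio) == 0)
{
    player.Start(() => Console.WriteLine("playback finished"));   // PlayFinishedDo callback
}
player.PlayFast();        // speed up
player.PlaySlow();        // slow down
player.PlayNextFrame();   // single-step one frame
player.Stop();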

This concludes the walkthrough of playing MP4 audio and video in C# with SDL2; I hope it serves as a useful reference.
