2013-02-08 91 views
1

我試圖播放使用ffmpeg和OpenGL + SDL的視頻文件。播放非常緩慢且閃爍。該代碼是來自不同博客/網站的積累,我不確定發生了什麼。抱歉發佈這麼長的代碼,但這是最小化的版本。我的實際代碼在窗口模式下也不能很好地運行。不知何故,下面的版本在窗口模式下播放流暢。OpenGL + ffmpeg在全屏模式下慢

#ifndef INT64_C 
#define INT64_C(c) (int64_t)(c) 
#define UINT64_C(c) (uint64_t)(c) 
#endif 

extern "C" { 
#include <libavcodec/avcodec.h> 
#include <libavformat/avformat.h> 
#include <libswscale/swscale.h> 
} 
#include <SDL.h> 
#include <GL/gl.h> 
#include <stdio.h> 

// --- Shared playback state: written by av_init(), read by main()/draw_frame() ---
int fullscreen = 1, videoStream = -1, frameFinished=0;  // videoStream: -1 until found; frameFinished: decoder "got picture" flag
const PixelFormat CONV_FORMAT = PIX_FMT_RGB24;  // sws_scale output format; matches the GL_RGB texture uploads
const char *fname = "moviesample.mp4";  // input file path
AVFormatContext *pFormatCtx = NULL;  // container/demuxer context for fname
AVCodecContext *pCodecCtx = NULL;  // video decoder context (owned by pFormatCtx)
AVCodec   *pCodec = NULL;  // decoder implementation matching the stream's codec_id
AVFrame   *pFrame = 0, *pFrameRGB = 0;  // decoded native-format frame / RGB-converted frame
AVPacket  packet;  // reused packet for av_read_frame()
AVDictionary *optionsDict = NULL;  // codec-open options (none set)
struct SwsContext *sws_ctx = NULL;  // pixel-format/scale conversion context
GLuint texture_video;  // GL texture that receives each converted frame

void av_init(); 
void draw_frame(); 

int main(int argc, const char **argv) { 
    SDL_Event event; 

    av_init(); 

    uint16_t width = fullscreen ? 1600 : pCodecCtx->width; 
    uint16_t height = fullscreen ? 900 : pCodecCtx->height; 

    SDL_Init(SDL_INIT_EVERYTHING); 
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1); 
    SDL_SetVideoMode(width, height, 32, 
     SDL_OPENGL | SDL_HWPALETTE | SDL_HWSURFACE | SDL_HWACCEL | 
     (fullscreen ? SDL_FULLSCREEN : 0) 
    ); 

    glEnable(GL_TEXTURE_2D); 
    glClearColor(0.0f, 0.4f, 0.4f, 0.0f); 
    glViewport(0, 0, width, height); 
    glMatrixMode(GL_PROJECTION); 
    glLoadIdentity(); 
    glMatrixMode(GL_MODELVIEW); 
    glLoadIdentity(); 
    glShadeModel(GL_SMOOTH); 
    glGenTextures(1, &texture_video); 
    glBindTexture(GL_TEXTURE_2D, texture_video); 
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pCodecCtx->width, pCodecCtx->height, 
     0, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 

    sws_ctx = sws_getCachedContext(sws_ctx, pCodecCtx->width, pCodecCtx->height, 
     pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, CONV_FORMAT, 
     SWS_BICUBIC, NULL, NULL, NULL); 

    while (1) { 

    draw_frame(); 

    SDL_GL_SwapBuffers(); 

    SDL_PollEvent(&event); 

    switch(event.type) { 
     case SDL_QUIT: 
     SDL_Quit(); 
     exit(0); 
     break; 
     case SDL_KEYDOWN: 
     if (event.key.keysym.sym == SDLK_ESCAPE) { 
      SDL_Quit(); 
      exit(0); 
     } 
     break; 
     default: 
     break; 
    } 
    } 
    return 0; 
} 

void draw_frame() { 
    if (av_read_frame(pFormatCtx, &packet)>=0) { 
    if(packet.stream_index==videoStream) { 
     avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); 
     if(frameFinished) { 
     sws_scale (sws_ctx, (uint8_t const * const *)pFrame->data, 
      pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, 
      pFrameRGB->linesize); 
     glBindTexture(GL_TEXTURE_2D, texture_video); 
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 
     glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, pCodecCtx->width, 
      pCodecCtx->height, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]); 
     } 

     glClear(GL_COLOR_BUFFER_BIT); 
     glScalef(1.0f, -1.0f, 1.0f); 
     glBegin(GL_QUADS); 
     glTexCoord2f(0.0f, 0.0f); 
     glVertex3f(-1.0f, -1.0f, 0.0f); 
     glTexCoord2f(0.0f, 1.0f); 
     glVertex3f(-1.0f, 1.0f, 0.0f); 
     glTexCoord2f(1.0f, 1.0f); 
     glVertex3f(1.0f, 1.0f, 0.0f); 
     glTexCoord2f(1.0f, 0.0f); 
     glVertex3f(1.0f, -1.0f, 0.0f); 
     glEnd(); 
     glScalef(1.0f, -1.0f, 1.0f); 

    } 
    av_free_packet(&packet); 
    } else { 
    av_seek_frame(pFormatCtx, videoStream, 0, AVSEEK_FLAG_FRAME); 
    } 

} 

void av_init() { 
    av_register_all(); 
    avformat_open_input(&pFormatCtx, fname, NULL, NULL); 
    avformat_find_stream_info(pFormatCtx, NULL); 
    for(uint8_t i=0; i<pFormatCtx->nb_streams; i++) 
    if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { 
     videoStream=i; 
     break; 
    } 
    pCodecCtx = pFormatCtx->streams[videoStream]->codec; 
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id); 
    avcodec_open2(pCodecCtx, pCodec, &optionsDict); 
    pFrame = avcodec_alloc_frame(); 
    pFrameRGB = avcodec_alloc_frame(); 
    int bytes = avpicture_get_size(CONV_FORMAT, pCodecCtx->width, 
    pCodecCtx->height);   
    uint8_t *video_buffer = (uint8_t*)av_malloc(bytes * sizeof(uint8_t)); 
    avpicture_fill((AVPicture *)pFrameRGB, video_buffer, CONV_FORMAT, 
     pCodecCtx->width, pCodecCtx->height); 
} 
+0

您使用在glBegin/glEnd調用每個循環都有完全相同的參數。這將頂點緩衝區從CPU上傳到GPU,每個幀都有開銷。您可以創建兩個頂點緩衝區(一個用於位置,另一個用於紋理座標),這些緩衝區將駐留在GPU上,因此每幀不會浪費。 – 2013-02-08 23:00:34

回答

4

在全屏模式下你可能啟用了垂直同步(vsync),這意味着 SDL_GL_SwapBuffers() 每一幀都會阻塞 16 毫秒左右。

要在窗口模式下模擬效果,請在循環的末尾添加SDL_Delay(16)

重寫 draw_frame(),讓它持續從 libav 讀取並解碼,直到真正得到下一幀爲止;而不是每次主迴圈只讀取一個封包、然後寄望它恰好是一幀畫面:

// g++ main.cpp `pkg-config sdl gl libswscale libavcodec libavformat --libs --cflags` && SDL_VIDEO_FULLSCREEN_HEAD=0 ./a.out 
#ifndef INT64_C 
#define INT64_C(c) (int64_t)(c) 
#define UINT64_C(c) (uint64_t)(c) 
#endif 

extern "C" { 
#include <libavcodec/avcodec.h> 
#include <libavformat/avformat.h> 
#include <libswscale/swscale.h> 
} 
#include <SDL.h> 
#include <GL/gl.h> 

// --- Shared playback state: written by av_init(), read by main()/next_frame() ---
int fullscreen = 1, videoStream = -1, frameFinished=0;  // videoStream: -1 until found; frameFinished: decoder "got picture" flag
const PixelFormat CONV_FORMAT = PIX_FMT_RGB24;  // sws_scale output format; matches the GL_RGB texture uploads
const char *fname = "/home/genpfault/vid.mpg";  // input file path
AVFormatContext *pFormatCtx = NULL;  // container/demuxer context for fname
AVCodecContext *pCodecCtx = NULL;  // video decoder context (owned by pFormatCtx)
AVCodec   *pCodec = NULL;  // decoder implementation matching the stream's codec_id
AVFrame   *pFrame = 0, *pFrameRGB = 0;  // decoded native-format frame / RGB-converted frame
AVPacket  packet;  // reused packet for av_read_frame()
AVDictionary *optionsDict = NULL;  // codec-open options (none set)
struct SwsContext *sws_ctx = NULL;  // pixel-format/scale conversion context
GLuint texture_video;  // GL texture that receives each converted frame

void av_init(); 
void next_frame(); 

int main(int argc, const char **argv) { 
    SDL_Event event; 

    av_init(); 

    uint16_t width = fullscreen ? 1920 : pCodecCtx->width; 
    uint16_t height = fullscreen ? 1200 : pCodecCtx->height; 

    SDL_Init(SDL_INIT_EVERYTHING); 
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1); 
    SDL_SetVideoMode(width, height, 32, 
     SDL_OPENGL | 
     (fullscreen ? SDL_FULLSCREEN : 0) 
    ); 

    glEnable(GL_TEXTURE_2D); 
    glClearColor(0.0f, 0.4f, 0.4f, 0.0f); 
    glViewport(0, 0, width, height); 
    glMatrixMode(GL_PROJECTION); 
    glLoadIdentity(); 
    glMatrixMode(GL_MODELVIEW); 
    glLoadIdentity(); 
    glShadeModel(GL_SMOOTH); 
    glGenTextures(1, &texture_video); 
    glBindTexture(GL_TEXTURE_2D, texture_video); 
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pCodecCtx->width, pCodecCtx->height, 
     0, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 

    sws_ctx = sws_getCachedContext(sws_ctx, pCodecCtx->width, pCodecCtx->height, 
     pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, CONV_FORMAT, 
     SWS_BICUBIC, NULL, NULL, NULL); 

    while (1) { 

    while(SDL_PollEvent(&event)) 
    { 
     switch(event.type) { 
      case SDL_QUIT: 
      SDL_Quit(); 
      exit(0); 
      break; 
      case SDL_KEYDOWN: 
      if (event.key.keysym.sym == SDLK_ESCAPE) { 
       SDL_Quit(); 
       exit(0); 
      } 
      break; 
      default: 
      break; 
     } 
    } 

    next_frame(); 

    glClear(GL_COLOR_BUFFER_BIT); 
    glBindTexture(GL_TEXTURE_2D, texture_video); 
    glScalef(1.0f, -1.0f, 1.0f); 
    glBegin(GL_QUADS); 
    glTexCoord2f(0.0f, 0.0f); 
    glVertex3f(-1.0f, -1.0f, 0.0f); 
    glTexCoord2f(0.0f, 1.0f); 
    glVertex3f(-1.0f, 1.0f, 0.0f); 
    glTexCoord2f(1.0f, 1.0f); 
    glVertex3f(1.0f, 1.0f, 0.0f); 
    glTexCoord2f(1.0f, 0.0f); 
    glVertex3f(1.0f, -1.0f, 0.0f); 
    glEnd(); 
    glScalef(1.0f, -1.0f, 1.0f); 

    SDL_GL_SwapBuffers(); 
    } 
    return 0; 
} 

/*
 * Pump libav until a complete video frame has been decoded, then convert it
 * to RGB and upload it into texture_video.  Skips non-video packets and
 * partial frames; rewinds to the start of the stream on EOF (looping play).
 *
 * Fix: the original broke out of the loop on frameFinished BEFORE calling
 * av_free_packet(), leaking one packet per displayed frame (as noted in the
 * follow-up comment).  The packet is now freed on every path.
 */
void next_frame() 
{ 
    while(true) 
    { 
     if(av_read_frame(pFormatCtx, &packet) < 0) 
     { 
      /* EOF or read error: seek back to the beginning and keep going. */ 
      av_seek_frame(pFormatCtx, videoStream, 0, AVSEEK_FLAG_FRAME); 
      continue; 
     } 
     int got_picture = 0; 
     if(packet.stream_index == videoStream) 
     { 
      avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); 
      got_picture = frameFinished; 
     } 
     av_free_packet(&packet);  /* always free -- no leak on the break path */ 
     if(got_picture) 
      break; 
    } 

    /* Convert the decoded frame to RGB24 and upload it to the texture. */ 
    sws_scale (sws_ctx, (uint8_t const * const *)pFrame->data, 
     pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, 
     pFrameRGB->linesize); 
    glBindTexture(GL_TEXTURE_2D, texture_video); 
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);  /* RGB24 rows need not be 4-byte aligned */ 
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, pCodecCtx->width, 
     pCodecCtx->height, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]); 
} 

void av_init() { 
    av_register_all(); 
    avformat_open_input(&pFormatCtx, fname, NULL, NULL); 
    avformat_find_stream_info(pFormatCtx, NULL); 
    for(uint8_t i=0; i<pFormatCtx->nb_streams; i++) 
    if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { 
     videoStream=i; 
     break; 
    } 
    pCodecCtx = pFormatCtx->streams[videoStream]->codec; 
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id); 
    avcodec_open2(pCodecCtx, pCodec, &optionsDict); 
    pFrame = avcodec_alloc_frame(); 
    pFrameRGB = avcodec_alloc_frame(); 
    int bytes = avpicture_get_size(CONV_FORMAT, pCodecCtx->width, 
    pCodecCtx->height);   
    uint8_t *video_buffer = (uint8_t*)av_malloc(bytes * sizeof(uint8_t)); 
    avpicture_fill((AVPicture *)pFrameRGB, video_buffer, CONV_FORMAT, 
     pCodecCtx->width, pCodecCtx->height); 
} 
+0

謝謝!它的工作原理:)如果流不是videoStream,或者frameFinished是真的,那麼在'next_frame'中是否有泄漏?這些條件跳過'av_free_packet'調用。 – fusha 2013-02-09 06:58:15

+0

呃,可能:)沒有什麼'libav'的傢伙。 – genpfault 2013-02-09 09:13:40