A Simple RTSP Player Based on FFmpeg + SDL2
References:
Build reference: http://blog.chinaunix.net/uid-20718335-id-2980793.html
Code reference: http://blog.csdn.net/leixiaohua1020/article/details/8652605
Goal: compile FFmpeg on Windows, then use the resulting shared libraries in a test program that plays a local file and an RTSP video stream.
Inserting images into a CSDN blog post is such a hassle: all the screenshots I carefully captured for the previous post vanished on upload, and the code I tried to color came out as raw HTML mixed into the program after publishing. Speechless...
1. Download and install MinGW: from http://sourceforge.net/projects/mingw/ get the MinGW Installation Manager, select mingw-developer-toolkit, mingw32-base, mingw32-gcc-g++ and msys-base, then Apply Changes.
2. Go to the MinGW installation directory (C:\MinGW by default) and edit C:\MinGW\msys\1.0\msys.bat: at the top of the file add call "C:\Program Files\Microsoft Visual Studio 10.0\VC\bin\vcvars32.bat" (adjust to your actual VS path), then save. This puts the VS tools, notably lib.exe, on the PATH so the FFmpeg build can generate MSVC-compatible import libraries.
3. Download FFmpeg from http://www.ffmpeg.org/releases/ (pick the latest release, currently ffmpeg-2.2.4.tar.gz)
Download yasm from http://yasm.tortall.net/Download.html (the assembler FFmpeg needs)
Download mp3lame from http://sourceforge.net/projects/lame/files/ (MP3 support)
Download x264 from http://www.videolan.org/developers/x264.html (H.264 support)
Download SDL2 from http://www.libsdl.org/release/SDL2-devel-2.0.3-VC.zip (SDL is the multimedia library used for display, so the prebuilt development package is fine)
A few optional extras:
Xvid http://www.xvid.org/
faad2 http://www.audiocoding.com/downloads.html
faac http://www.audiocoding.com/downloads.html
aacenc http://sourceforge.net/projects/opencore-amr/files/
amr-nb amr-wb http://www.penguin.cz/~utx/amr
opencore-amr http://sourceforge.net/projects/opencore-amr/files/
4. Extract yasm and put the exe into c:\windows\system32.
Extract ffmpeg, and extract each of the third-party packages above into the ffmpeg folder.
Run msys.bat.
Install mp3lame:
cd lame-3.99.2
./configure --disable-shared
make
make install
Install x264:
cd x264-snapshot-20140703-2245
./configure --enable-shared
make
make install
Installing the other third-party libraries works the same way.
By default everything is installed under C:\MinGW\msys\1.0\local.
Install ffmpeg:
cd ffmpeg-2.2.4
./configure --enable-shared --disable-static --enable-ffplay --enable-nonfree --enable-memalign-hack --enable-libmp3lame --enable-gpl --enable-libx264 --enable-version3 --extra-cflags=-IC:/MinGW/msys/1.0/local/include --extra-ldflags=-LC:/MinGW/msys/1.0/local/lib
make
make install
make install failed with the following error:
C:\MinGW\bin\strip.exe: unable to rename 'C:/MinGW/msys/1.0/local/bin/avcodec-55.dll'; reason: File exists
It turned out that closing 360 (the antivirus) made the error go away.
OK, the tedious build process is over; now we can happily write the test demo.
==================================================================================================================================
1. Create a VC++ Win32 console project in VS2010.
2. In the project directory create a lib directory and an include directory. Copy the compiled .lib files into lib, the headers into include, and the .dll files into the Debug directory; copy the prebuilt SDL2 files into the corresponding directories as well.
3. Project Properties -> Configuration Properties -> VC++ Directories -> Include Directories: add the FFmpeg header directory and the other third-party header directories.
Linker -> General -> Additional Library Directories: add the lib directory.
Linker -> Input -> Additional Dependencies: add the names of the individual .lib files (or pull them in from source, as sketched below).
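If you prefer not to maintain the dependency list in the project settings, MSVC also accepts the library names in source via #pragma comment. A minimal sketch; the .lib names below are assumptions for this FFmpeg 2.2.x + SDL2 setup, so adjust them to whatever files actually ended up in your lib directory:

// Alternative to Linker -> Input -> Additional Dependencies:
// MSVC-specific pragmas that tell the linker which import libraries to use.
// The names below are assumptions for an FFmpeg 2.2.x + SDL2 build; adjust them
// to match the .lib files you copied into the lib directory.
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")
#pragma comment(lib, "SDL2.lib")
#pragma comment(lib, "SDL2main.lib")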
4. In include, create a new text file, add #define FFMPEG_VERSION "2.2.4", and save it as version.h.
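Before adding the full demo, it can help to confirm that the headers in include and the DLLs in Debug come from the same build. A minimal sanity-check sketch (the file name version-check.cpp and the check itself are just for illustration, not a required step):

// version-check.cpp (illustrative): compare the avcodec version from the headers
// with the version reported by the avcodec DLL loaded at runtime.
#include "stdafx.h"
#include <stdio.h>
#ifdef __cplusplus
extern "C" {
#endif
#include <avcodec.h>
#ifdef __cplusplus
}
#endif

int _tmain(int argc, _TCHAR* argv[])
{
    unsigned headerVer  = LIBAVCODEC_VERSION_INT;  // version baked into the headers at compile time
    unsigned runtimeVer = avcodec_version();       // version of the DLL found next to the exe
    printf("avcodec header : %u.%u.%u\n", headerVer >> 16, (headerVer >> 8) & 0xff, headerVer & 0xff);
    printf("avcodec runtime: %u.%u.%u\n", runtimeVer >> 16, (runtimeVer >> 8) & 0xff, runtimeVer & 0xff);
    return headerVer == runtimeVer ? 0 : 1;        // non-zero exit means headers and DLL do not match
}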
5. Add ffmpeg-test.cpp:
// ffmpeg-test.cpp : Defines the entry point for the console application.
//
#include "stdafx.h"

#ifdef __cplusplus
extern "C" {
#endif
#include <avcodec.h>
#include <avdevice.h>
#include <avformat.h>
#include <avfilter.h>
#include <avutil.h>
#include <swscale.h>
#include <SDL/SDL.h>
#ifdef __cplusplus
}
#endif

int _tmain(int argc, _TCHAR* argv[])
{
    AVFormatContext *pFormatCtx;
    int i, videoindex;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    char filepath[] = "nwn.mp4";
    //char rtspUrl[] = "rtsp://192.168.11.19/live0.264";
    //char rtspUrl[] = "rtsp://218.204.223.237:554/live/1/0547424F573B085C/gsfp90ef4k0a6iap.sdp";
    char rtspUrl[] = "rtsp://211.139.194.251:554/live/2/13E6330A31193128/5iLd2iNl5nQ2s8r8.sdp";

    av_register_all();                        // register all components
    avformat_network_init();                  // enable network streams
    pFormatCtx = avformat_alloc_context();    // allocate the AVFormatContext
    if(avformat_open_input(&pFormatCtx, /*filepath*/rtspUrl, NULL, NULL) != 0){  // open the input
        printf("Couldn't open input stream.\n");
        return -1;
    }
    if(av_find_stream_info(pFormatCtx) < 0)   // read stream information
    {
        printf("Couldn't find stream information.\n");
        return -1;
    }
    videoindex = -1;
    for(i = 0; i < pFormatCtx->nb_streams; i++)   // find the video stream index
        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoindex = i;
            break;
        }
    if(videoindex == -1)
    {
        printf("Didn't find a video stream.\n");
        return -1;
    }
    pCodecCtx = pFormatCtx->streams[videoindex]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);   // find the decoder
    if(pCodec == NULL)
    {
        printf("Codec not found.\n");
        return -1;
    }
    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0)         // open the decoder
    {
        printf("Could not open codec.\n");
        return -1;
    }

    AVFrame *pFrame, *pFrameYUV;
    pFrame = avcodec_alloc_frame();      // holds the decoded frame
    pFrameYUV = avcodec_alloc_frame();   // holds the converted frame (the decoder's pixel format and
                                         // stride may not match what the SDL texture expects, hence the conversion)
    uint8_t *out_buffer;
    out_buffer = new uint8_t[avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height)];  // buffer for the converted frame
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);  // wire the buffer into pFrameYUV

    //------------SDL init--------
    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
    {
        printf("Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }
    SDL_Window *screen = SDL_CreateWindow("RTSP Client Demo",
        SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
        pCodecCtx->width, pCodecCtx->height,
        SDL_WINDOW_RESIZABLE/* SDL_WINDOW_HIDDEN*/ | SDL_WINDOW_OPENGL);
    if(!screen)
    {
        printf("SDL: could not set video mode - exiting\n");
        return -1;
    }
    SDL_Renderer *sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
    SDL_Texture *sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_YV12,
        SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
    SDL_Rect rect;
    //-----------------------------

    int ret, got_picture;
    static struct SwsContext *img_convert_ctx;
    int y_size = pCodecCtx->width * pCodecCtx->height;

    SDL_Event event;
    AVPacket *packet = (AVPacket *)malloc(sizeof(AVPacket));  // holds the compressed packets
    av_new_packet(packet, y_size);

    // dump the stream information -----------------------------
    printf("File information-----------------------------------------\n");
    av_dump_format(pFormatCtx, 0, filepath, 0);   // filepath is only used as a display label here
    printf("-------------------------------------------------\n");
    //------------------------------
    while(av_read_frame(pFormatCtx, packet) >= 0)   // read one compressed packet per iteration
    {
        if(packet->stream_index == videoindex)
        {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);  // decode: AVPacket in, AVFrame out
            if(ret < 0)
            {
                printf("Decode error.\n");
                return -1;
            }
            if(got_picture)
            {
                // pixel format conversion: pFrame -> pFrameYUV
                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                    pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize,
                    0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
                sws_freeContext(img_convert_ctx);

                //------------SDL display--------
                rect.x = 0;
                rect.y = 0;
                rect.w = pCodecCtx->width;
                rect.h = pCodecCtx->height;
                SDL_UpdateTexture(sdlTexture, &rect, pFrameYUV->data[0], pFrameYUV->linesize[0]);
                SDL_RenderClear(sdlRenderer);
                SDL_RenderCopy(sdlRenderer, sdlTexture, &rect, &rect);
                SDL_RenderPresent(sdlRenderer);
                SDL_Delay(20);   // crude pacing: wait about 20 ms between frames
                //------------SDL-----------
            }
        }
        av_free_packet(packet);

        SDL_PollEvent(&event);
        switch(event.type)
        {
        case SDL_QUIT:
            SDL_Quit();
            exit(0);
            break;
        default:
            break;
        }
    }

    SDL_DestroyTexture(sdlTexture);
    delete[] out_buffer;
    av_free(pFrameYUV);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}

The code is adapted from material found online; the changes are mainly to follow the updated FFmpeg and SDL2 APIs. The SDL2 API changes are described at http://wiki.libsdl.org/MigrationGuide, and the FFmpeg API changes are listed in the doc\APIchanges file in the source tree.
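One practical note for RTSP sources: if the stream stutters or shows artifacts over the default UDP transport, FFmpeg's RTSP demuxer accepts an rtsp_transport option that switches to interleaved TCP. A minimal sketch of passing it through avformat_open_input (the helper name open_rtsp_over_tcp is just for illustration); in the demo above it would replace the plain avformat_open_input(&pFormatCtx, rtspUrl, NULL, NULL) call:

#ifdef __cplusplus
extern "C" {
#endif
#include <libavformat/avformat.h>
#include <libavutil/dict.h>
#ifdef __cplusplus
}
#endif

// Open an RTSP source forcing TCP transport instead of the default UDP.
// (Adjust the include paths above to match how the headers were copied in step 2.)
static int open_rtsp_over_tcp(AVFormatContext **fmtCtx, const char *url)
{
    AVDictionary *opts = NULL;
    av_dict_set(&opts, "rtsp_transport", "tcp", 0);   // RTSP demuxer option: use TCP
    int err = avformat_open_input(fmtCtx, url, NULL, &opts);
    av_dict_free(&opts);                              // release the options dictionary
    return err;                                       // 0 on success, negative AVERROR code on failure
}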