使用FFmpeg解碼RTSP協議的視頻流,並使用OpenGL渲染,
1.之前有使用VLC解碼但是不能滿足需求,後來直接使用FFmpeg解碼openGL渲染
2.使用FFmpeg解碼,這裏解決了解碼後不能拿到視頻流在多處顯示的問題
3.解決了打開首幀視頻比較慢的問題,現在1-2秒內即能打開視頻
4.解決了找不到pps報警告的問題
5.多線程優化,解碼的同時不影響UI的操作更新
解碼後可以轉爲圖片貼圖顯示,或使用OpenGL渲染,共兩種方式顯示
使用前準備
1.首先編譯FFMpeg源碼,集成ios的庫,添加到工程中並導入相應的庫,編譯集成FFmpeg
我將FFMpeg的解碼代碼封裝在一個文件中,方便使用,解碼源碼文件下載地址
首先展示轉爲圖片顯示的
#import "CQMovieView.h"
#import "OpenglView.h"
#define LERP(A,B,C) ((A)*(1.0-C)+(B)*C)
@interface CQMovieView ()
// RTSP path of the current stream; also used as the notification name when a
// decoded frame is broadcast. NOTE(review): "cruutenPath" looks like a typo
// for "currentPath" — renaming would touch callers, so it is only flagged here.
@property (nonatomic, copy) NSString *cruutenPath;
// Created in -initWithFrame:; no other use is visible in this file.
@property (nonatomic ,strong)NSMutableArray *mutArray;
// OpenGL-backed view used by the GL-rendering variant to display YUV frames.
@property (nonatomic,strong)OpenglView *openglView;
@end
@implementation CQMovieView
{
// FFmpeg demuxer context for the RTSP input.
AVFormatContext *pFormatCtx;
// H.264 decoder context.
AVCodecContext *pCodecCtx;
// Reusable decoded-frame holder. NOTE(review): "pFream" is presumably a typo for "pFrame".
AVFrame *pFream;
AVStream *stream;        // unused in this file
AVPacket *packet;        // compressed packet read from the demuxer
int i,videoIndex;        // loop counter / index of the video stream in pFormatCtx->streams
AVPicture picture;       // RGB24 destination picture filled by sws_scale
int videoStram;          // unused (typo of "videoStream")
double fps;              // unused
BOOL isReleaseResources; // set once -releaseResources has run
dispatch_queue_t queue;  // background queue the decode loop runs on
}
/// Designated initializer: prepares decode state; actual playback starts in -Video:.
- (instancetype)initWithFrame:(CGRect)frame
{
    if (self = [super initWithFrame:frame]) {
        // NOTE: [super initWithFrame:frame] already applies the frame; the
        // original re-assigned self.frame redundantly.
        _mutArray = [NSMutableArray array];
        _isdecoder = YES; // decoding enabled by default
    }
    return self;
}
/// Switches decoding to a new stream path on the background queue.
/// NOTE(review): the previous decode loop is not stopped or released here —
/// confirm callers invoke -pause / -releaseResources first.
- (void)replaceTheResources:(NSString *)moviePath{
    // FIX: dispatch_async onto a NULL queue crashes; `queue` is only set in
    // -Video:, so fall back to a global queue when this is called first.
    if (queue == NULL) {
        queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    }
    dispatch_async(queue, ^{
        [self initializeResources:[moviePath UTF8String]];
    });
}
/// Starts decoding the given RTSP path and displays frames in an image view.
/// @param moviePath Base RTSP URL; a device-specific channel suffix is appended.
-(void)Video:(NSString *)moviePath
{
    NSLog(@"%@",moviePath);
    // FIX: dispatch_get_global_queue takes (priority, flags); the original call
    // had the arguments swapped and only worked by accident because
    // DISPATCH_QUEUE_PRIORITY_DEFAULT == 0.
    queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    self.cruutenPath = [moviePath copy];
    self.ImageView = [[UIImageView alloc]initWithFrame:self.bounds];
    [self addSubview:self.ImageView];
    NSLog(@"===%@",moviePath);
    assert(moviePath);
    dispatch_async(queue, ^{
        // Camera-specific channel/stream suffix appended to the RTSP URL.
        // TODO(review): extract this magic string into a named constant.
        NSString *path = [moviePath stringByAppendingString:@"0200012000c8000f"];
        [self initializeResources:[path UTF8String]];
    });
}
/// Opens the RTSP stream at `pathUrl`, decodes H.264 frames, converts them to
/// RGB24 and publishes them as UIImages on the main queue. Runs its decode
/// loop on the caller's (background) queue until `_isdecoder` is cleared.
- (void)initializeResources:(const char *)pathUrl{
    // Register demuxers/decoders — required once before any other call on the
    // pre-4.0 FFmpeg API used throughout this file.
    av_register_all();
    avcodec_register_all();
    // Demuxer context + network layer (needed for RTSP).
    pFormatCtx = avformat_alloc_context();
    avformat_network_init();
    const char * path = pathUrl;
    if (path == NULL) {
        printf("無法找到文件路徑\n"); // FIX: was "/n", a wrong escape sequence
        return ;
    }
    // RTSP over TCP, and a tiny probe window so the first frame appears in
    // 1-2 s instead of waiting for full stream analysis.
    AVDictionary *options = NULL;
    av_dict_set(&options, "rtsp_transport", "tcp", 0);
    pFormatCtx->probesize = 1000;
    pFormatCtx->max_analyze_duration2 = 0;//3 * AV_TIME_BASE;
    // H.264 decoder.
    AVCodec *pCodec;
    pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    videoIndex = -1;
    pCodecCtx = avcodec_alloc_context3(pCodec);
    pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    // Hard-coded SPS/PPS handed to the decoder so it stops warning about a
    // missing PPS before in-band parameter sets arrive.
    // FIX: extradata must be allocated with the av_malloc family and padded by
    // AV_INPUT_BUFFER_PADDING_SIZE; it is owned (and freed) by the codec
    // context. The original code malloc()'d it and then free(ut)'d it later in
    // this method while the decoder was still using it — a use-after-free and
    // an eventual double free in avcodec_close().
    // Layout: start code, SPS (67 42 80 1e ...), start code, PPS (68 ce 38 80).
    static const uint8_t kAnnexBExtradata[32] = {
        0x00, 0x00, 0x00, 0x01,
        0x67, 0x42, 0x80, 0x1e,
        0x88, 0x8b, 0x40, 0x50,
        0x1e, 0xd0, 0x80, 0x00,
        0x03, 0x84, 0x00, 0x00,
        0xaf, 0xc8, 0x02, 0x00,
        0x00, 0x00, 0x00, 0x01,
        0x68, 0xce, 0x38, 0x80,
    };
    pCodecCtx->extradata = av_mallocz(32 + AV_INPUT_BUFFER_PADDING_SIZE);
    pCodecCtx->extradata_size = 32;
    memcpy(pCodecCtx->extradata, kAnnexBExtradata, 32);
    // Open the stream and read stream info.
    if(avformat_open_input(&pFormatCtx,path,NULL,&options)!=0){
        NSLog(@"不能打開流");
        av_dict_free(&options); // FIX: options dictionary was leaked on every path
        return ;
    }
    if(avformat_find_stream_info(pFormatCtx,&options)<0){
        NSLog(@"不能成功查看視頻流信息");
        av_dict_free(&options);
        return ;
    }
    av_dict_free(&options);
    // Find the first video stream.
    for (i = 0; i<pFormatCtx->nb_streams; i++) {
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO ){
            videoIndex=i;
            break;
        }
    }
    if(videoIndex==-1){
        NSLog(@"沒有找到視頻流");
        return ;
    }
    if (pCodec == NULL) {
        NSLog(@"解碼器沒找到");
        return;
    }
    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
        NSLog(@"解碼器打開失敗");
        return;
    }
    // Decoded frame holder + reusable packet.
    pFream = av_frame_alloc();
    packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    NSLog(@"--------------- File Informatin ----------------");
    // av_dump_format is a debugging helper: prints what pFormatCtx->streams contains.
    av_dump_format(pFormatCtx, 0, path, 0);
    NSLog(@"-------------------------------------------------");
    int got_picture_ptr = 0;
    // Decode loop: one packet per iteration; convert YUV420P -> RGB24, wrap in
    // a UIImage and hand it to the main queue. Exits when -pause or
    // -releaseResources clears _isdecoder.
    while (_isdecoder) {
        if ( av_read_frame(pFormatCtx, packet)>=0) {
            if (packet->stream_index == videoIndex) {
                int ret = avcodec_decode_video2(pCodecCtx, pFream, &got_picture_ptr, packet);
                // FIX: only touch pFream once a complete frame was produced.
                // The original converted uninitialized frame data for every
                // packet before the first keyframe (ret/got_picture unchecked).
                if (ret >= 0 && got_picture_ptr) {
                    // (Re)allocate the RGB destination; avpicture_free uses
                    // av_freep internally, so calling it before the first
                    // alloc (zeroed ivar) is safe.
                    avpicture_free(&picture);
                    avpicture_alloc(&picture, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
                    // Per-frame scaler context; sws_getCachedContext would
                    // avoid the re-creation cost but behavior is kept as-is.
                    struct SwsContext *imgConverCtx = sws_getContext(pFream->width,
                                                                     pFream->height,
                                                                     AV_PIX_FMT_YUV420P,
                                                                     pCodecCtx->width,
                                                                     pCodecCtx->height,
                                                                     AV_PIX_FMT_RGB24,
                                                                     SWS_FAST_BILINEAR,
                                                                     NULL,
                                                                     NULL,
                                                                     NULL);
                    if(imgConverCtx == nil){
                        return ;
                    }
                    sws_scale(imgConverCtx,
                              pFream->data,
                              pFream->linesize,
                              0,
                              pFream->height,
                              picture.data,
                              picture.linesize);
                    sws_freeContext(imgConverCtx);
                    // Wrap the RGB24 buffer in a CGImage; the row stride is
                    // picture.linesize[0]. CFDataCreate copies the bytes, so
                    // freeing `picture` later does not invalidate the image.
                    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
                    CFDataRef data = CFDataCreate(kCFAllocatorDefault, picture.data[0], picture.linesize[0] * pCodecCtx->height);
                    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
                    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
                    CGImageRef cgImage = CGImageCreate(pCodecCtx->width, pCodecCtx->height, 8, 24, picture.linesize[0], colorSpace, bitmapInfo, provider, NULL, NO, kCGRenderingIntentDefault);
                    UIImage *image = [UIImage imageWithCGImage:cgImage];
                    CGImageRelease(cgImage);
                    CGColorSpaceRelease(colorSpace);
                    CGDataProviderRelease(provider);
                    CFRelease(data);
                    // UI updates must run on the main queue. FIX: the original
                    // built `image` and then did nothing with it.
                    dispatch_async(dispatch_get_main_queue(),^{
                        self.ImageView.image = image;
                        // Alternatively, broadcast so several views can show
                        // the same stream without decoding twice:
                        // [[NSNotificationCenter defaultCenter] postNotificationName:self.cruutenPath object:image];
                    });
                }
            }
            av_free_packet(packet);
            avpicture_free(&picture);
        }
    }
}
// Pause: clears the decode flag, which stops the read/decode loop.
// NOTE(review): the loop in -initializeResources: exits (rather than idles)
// when this flag drops, so -play alone will not resume decoding — confirm
// whether that is intended.
- (void)pause{
_isdecoder = NO;
}
// Play: re-arms the decode flag.
// NOTE(review): this only sets the flag; if the decode loop has already
// exited, decoding must be restarted via -Video: or -replaceTheResources:.
-(void)play{
_isdecoder = YES;
}
/// Tears down all FFmpeg state. Safe to call more than once: every pointer is
/// checked and nulled (or nulled by the FFmpeg helper) after release.
- (void)releaseResources {
    NSLog(@"釋放資源");
    isReleaseResources = YES;
    // Stop the decode loop before pulling the contexts out from under it.
    _isdecoder = NO;
    // FIX: the original called av_packet_unref(&packet), passing AVPacket**
    // where AVPacket* is expected; it also never freed the packet struct
    // itself (allocated with av_malloc in the decode setup).
    if (packet) {
        av_packet_unref(packet);
        av_free(packet);
        packet = NULL;
    }
    // FIX: frames allocated with av_frame_alloc must be released with
    // av_frame_free (which also frees the frame's buffers and NULLs pFream).
    if (pFream) {
        av_frame_free(&pFream);
    }
    // Close and free the decoder context (avcodec_close alone leaked the
    // context allocated by avcodec_alloc_context3).
    if (pCodecCtx != nil){
        avcodec_close(pCodecCtx);
        avcodec_free_context(&pCodecCtx);
    }
    // Close the input and shut the network layer down.
    if (pFormatCtx) avformat_close_input(&pFormatCtx);
    avformat_network_deinit();
}
- (void)dealloc
{
    // Stop observing and release all FFmpeg state when the view goes away.
    [[NSNotificationCenter defaultCenter]removeObserver:self];
    [self releaseResources];
}
@end
下面展示使用openGL渲染的代碼
#import "CQMovieView.h"
#import "OpenglView.h"
#define LERP(A,B,C) ((A)*(1.0-C)+(B)*C)
@interface CQMovieView ()
// RTSP path of the current stream; also used as the notification name when a
// decoded frame is broadcast. NOTE(review): "cruutenPath" looks like a typo
// for "currentPath" — renaming would touch callers, so it is only flagged here.
@property (nonatomic, copy) NSString *cruutenPath;
// Created in -initWithFrame:; no other use is visible in this file.
@property (nonatomic ,strong)NSMutableArray *mutArray;
// OpenGL-backed view used to render decoded YUV frames directly.
@property (nonatomic,strong)OpenglView *openglView;
@end
@implementation CQMovieView
{
// FFmpeg demuxer context for the RTSP input.
AVFormatContext *pFormatCtx;
// H.264 decoder context.
AVCodecContext *pCodecCtx;
// Reusable decoded-frame holder. NOTE(review): "pFream" is presumably a typo for "pFrame".
AVFrame *pFream;
AVStream *stream;        // unused in this file
AVPacket *packet;        // compressed packet read from the demuxer
int i,videoIndex;        // loop counter / index of the video stream in pFormatCtx->streams
AVPicture picture;       // unused in the OpenGL variant (YUV is displayed directly)
int videoStram;          // unused (typo of "videoStream")
double fps;              // unused
BOOL isReleaseResources; // set once -releaseResources has run
dispatch_queue_t queue;  // background queue the decode loop runs on
}
/// Designated initializer: prepares decode state; actual playback starts in -Video:.
- (instancetype)initWithFrame:(CGRect)frame
{
    if (self = [super initWithFrame:frame]) {
        // NOTE: [super initWithFrame:frame] already applies the frame; the
        // original re-assigned self.frame redundantly.
        _mutArray = [NSMutableArray array];
        _isdecoder = YES; // decoding enabled by default
    }
    return self;
}
/// Switches decoding to a new stream path on the background queue.
/// NOTE(review): the previous decode loop is not stopped or released here —
/// confirm callers invoke -pause / -releaseResources first.
- (void)replaceTheResources:(NSString *)moviePath{
    // FIX: dispatch_async onto a NULL queue crashes; `queue` is only set in
    // -Video:, so fall back to a global queue when this is called first.
    if (queue == NULL) {
        queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    }
    dispatch_async(queue, ^{
        [self initializeResources:[moviePath UTF8String]];
    });
}
/// Starts decoding the given RTSP path and renders frames via OpenglView.
/// @param moviePath Base RTSP URL; a device-specific channel suffix is appended.
-(void)Video:(NSString *)moviePath
{
    NSLog(@"%@",moviePath);
    // FIX: dispatch_get_global_queue takes (priority, flags); the original call
    // had the arguments swapped and only worked by accident because
    // DISPATCH_QUEUE_PRIORITY_DEFAULT == 0.
    queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    self.cruutenPath = [moviePath copy];
    self.ImageView = [[UIImageView alloc]initWithFrame:self.bounds];
    [self addSubview:self.ImageView];
    // OpenGL output surface for the decoded YUV planes.
    self.openglView = [[OpenglView alloc]initWithFrame:self.bounds];
    [self addSubview:self.openglView];
    [self.openglView setVideoSize:self.bounds.size.width height:self.bounds.size.height];
    NSLog(@"===%@",moviePath);
    assert(moviePath);
    dispatch_async(queue, ^{
        // Camera-specific channel/stream suffix appended to the RTSP URL.
        // TODO(review): extract this magic string into a named constant.
        NSString *path = [moviePath stringByAppendingString:@"0200012000c8000f"];
        [self initializeResources:[path UTF8String]];
    });
}
/// Opens the RTSP stream at `pathUrl`, decodes H.264 frames, repacks the YUV
/// planes into one contiguous I420 buffer and hands them to the OpenGL view.
/// Runs its decode loop on the caller's (background) queue.
- (void)initializeResources:(const char *)pathUrl{
    // Register demuxers/decoders — required once before any other call on the
    // pre-4.0 FFmpeg API used throughout this file.
    av_register_all();
    avcodec_register_all();
    // Demuxer context + network layer (needed for RTSP).
    pFormatCtx = avformat_alloc_context();
    avformat_network_init();
    const char * path = pathUrl;
    if (path == NULL) {
        printf("無法找到文件路徑\n"); // FIX: was "/n", a wrong escape sequence
        return ;
    }
    // RTSP over TCP, and a tiny probe window so the first frame appears in
    // 1-2 s instead of waiting for full stream analysis.
    AVDictionary *options = NULL;
    av_dict_set(&options, "rtsp_transport", "tcp", 0);
    pFormatCtx->probesize = 1000;
    pFormatCtx->max_analyze_duration2 = 0;//3 * AV_TIME_BASE;
    // H.264 decoder.
    AVCodec *pCodec;
    pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    videoIndex = -1;
    pCodecCtx = avcodec_alloc_context3(pCodec);
    pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    // Hard-coded SPS/PPS handed to the decoder so it stops warning about a
    // missing PPS before in-band parameter sets arrive.
    // FIX: extradata must be allocated with the av_malloc family and padded by
    // AV_INPUT_BUFFER_PADDING_SIZE; it is owned (and freed) by the codec
    // context. The original code malloc()'d it and then free(ut)'d it later in
    // this method while the decoder was still using it — a use-after-free and
    // an eventual double free in avcodec_close().
    // Layout: start code, SPS (67 42 80 1e ...), start code, PPS (68 ce 38 80).
    static const uint8_t kAnnexBExtradata[32] = {
        0x00, 0x00, 0x00, 0x01,
        0x67, 0x42, 0x80, 0x1e,
        0x88, 0x8b, 0x40, 0x50,
        0x1e, 0xd0, 0x80, 0x00,
        0x03, 0x84, 0x00, 0x00,
        0xaf, 0xc8, 0x02, 0x00,
        0x00, 0x00, 0x00, 0x01,
        0x68, 0xce, 0x38, 0x80,
    };
    pCodecCtx->extradata = av_mallocz(32 + AV_INPUT_BUFFER_PADDING_SIZE);
    pCodecCtx->extradata_size = 32;
    memcpy(pCodecCtx->extradata, kAnnexBExtradata, 32);
    // Open the stream and read stream info.
    if(avformat_open_input(&pFormatCtx,path,NULL,&options)!=0){
        NSLog(@"不能打開流");
        av_dict_free(&options); // FIX: options dictionary was leaked on every path
        return ;
    }
    if(avformat_find_stream_info(pFormatCtx,&options)<0){
        NSLog(@"不能成功查看視頻流信息");
        av_dict_free(&options);
        return ;
    }
    av_dict_free(&options);
    // Find the first video stream.
    for (i = 0; i<pFormatCtx->nb_streams; i++) {
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO ){
            videoIndex=i;
            break;
        }
    }
    if(videoIndex==-1){
        NSLog(@"沒有找到視頻流");
        return ;
    }
    if (pCodec == NULL) {
        NSLog(@"解碼器沒找到");
        return;
    }
    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
        NSLog(@"解碼器打開失敗");
        return;
    }
    // Decoded frame holder + reusable packet.
    pFream = av_frame_alloc();
    packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    NSLog(@"--------------- File Informatin ----------------");
    // av_dump_format is a debugging helper: prints what pFormatCtx->streams contains.
    av_dump_format(pFormatCtx, 0, path, 0);
    NSLog(@"-------------------------------------------------");
    int got_picture_ptr = 0;
    // Decode loop: one packet per iteration; decoded planes are repacked into
    // a contiguous I420 buffer for the OpenGL view.
    // FIX: the loop now also checks _isdecoder so -pause actually stops it
    // (the original only tested av_read_frame).
    while (_isdecoder && av_read_frame(pFormatCtx, packet)>=0) {
        if (packet->stream_index == videoIndex) {
            int ret = avcodec_decode_video2(pCodecCtx, pFream, &got_picture_ptr, packet);
            if (ret < 0) {
                // FIX: one corrupt RTSP packet is not fatal; the original
                // returned here, leaking the packet and killing playback.
                NSLog(@"解碼失敗");
                av_free_packet(packet);
                continue;
            }
            // FIX: only touch pFream once a complete frame was produced; the
            // original copied uninitialized plane data before the first keyframe.
            if (got_picture_ptr) {
                // Contiguous I420 buffer: Y plane (w*h), U (w*h/4), V (w*h/4).
                char *yuvdata = malloc(pFream->height*pFream->width*3/2);
                // Copy row by row because linesize (stride) may exceed width.
                int i;
                for (i = 0; i < pFream->height; i++) {
                    memcpy(yuvdata + i * pFream->width,
                           pFream->data[0] + i * pFream->linesize[0], pFream->width);
                }
                for (i = 0; i < pFream->height / 2; i++) {
                    memcpy(yuvdata + pFream->height*pFream->width + i * pFream->width / 2,
                           pFream->data[1] + i * pFream->linesize[1], pFream->width / 2);
                }
                for (i = 0; i < pFream->height / 2; i++) {
                    memcpy(yuvdata + pFream->height*pFream->width*5/4 + i * pFream->width / 2,
                           pFream->data[2] + i * pFream->linesize[2], pFream->width / 2);
                }
                [self.openglView displayYUV420pData:yuvdata width:pFream->width height:pFream->height];
                free(yuvdata);
            }
        }
        // Release the packet's buffers before the next av_read_frame.
        av_free_packet(packet);
    }
}
// Pause: clears the decode flag.
// NOTE(review): as written, the decode loop in this variant does not test
// _isdecoder, so pausing appears to have no effect on it — confirm intended.
- (void)pause{
_isdecoder = NO;
}
// Play: re-arms the decode flag.
// NOTE(review): this only sets the flag; if the decode loop has already
// exited, decoding must be restarted via -Video: or -replaceTheResources:.
-(void)play{
_isdecoder = YES;
}
/// Tears down all FFmpeg state. Safe to call more than once: every pointer is
/// checked and nulled (or nulled by the FFmpeg helper) after release.
- (void)releaseResources {
    NSLog(@"釋放資源");
    isReleaseResources = YES;
    // Stop the decode loop before pulling the contexts out from under it.
    _isdecoder = NO;
    // FIX: the original called av_packet_unref(&packet), passing AVPacket**
    // where AVPacket* is expected; it also never freed the packet struct
    // itself (allocated with av_malloc in the decode setup).
    if (packet) {
        av_packet_unref(packet);
        av_free(packet);
        packet = NULL;
    }
    // FIX: frames allocated with av_frame_alloc must be released with
    // av_frame_free (which also frees the frame's buffers and NULLs pFream).
    if (pFream) {
        av_frame_free(&pFream);
    }
    // Close and free the decoder context (avcodec_close alone leaked the
    // context allocated by avcodec_alloc_context3).
    if (pCodecCtx != nil){
        avcodec_close(pCodecCtx);
        avcodec_free_context(&pCodecCtx);
    }
    // Close the input and shut the network layer down.
    if (pFormatCtx) avformat_close_input(&pFormatCtx);
    avformat_network_deinit();
}
- (void)dealloc
{
    // Stop observing and release all FFmpeg state when the view goes away.
    [[NSNotificationCenter defaultCenter]removeObserver:self];
    [self releaseResources];
}
@end
轉爲圖片顯示和OpenGL渲染兩種方式的解碼部分是一樣的,不同點在讀取流後的處理部分,即while循環裏的處理部分