// Reference: https://blog.csdn.net/asahinokawa/article/details/80596655 (YUV storage formats)
// QImage 的構造:QImage image(rgb_buffer, 1920, 1080, QImage::Format_RGB32);
// Convert a Format_RGB32 QImage to planar YUV420P with libswscale and append
// the raw frame to an output .yuv file.
//
// Parameters:
//   image - source frame; not logically modified (image.bits() may detach a
//           shared QImage buffer, which is why the parameter is non-const).
//
// NOTE(review): AV_PIX_FMT_RGB32 matches QImage::Format_RGB32 in-memory byte
// order (BGRA) on little-endian hosts -- confirm if this must run big-endian.
void writeRGB32BufferToYuv420P(QImage& image)
{
    const int imageWidth = image.width();
    const int imageHeight = image.height();

    // Source plane setup: point directly at the QImage pixel data instead of
    // allocating a staging buffer. (The original code allocated an rgbBuffer,
    // filled plane pointers into it, then overwrote data[0] with image.bits()
    // anyway -- the allocation and "copy" were dead work.) Using
    // bytesPerLine() as the stride is also correct for row-padded images,
    // unlike a hard-coded width*4.
    const uint8_t* rgbData[4] = { image.bits(), NULL, NULL, NULL };
    int rgbLinesize[4] = { (int)image.bytesPerLine(), 0, 0, 0 };

    // Destination: one contiguous YUV420P buffer (align=1 keeps the Y/U/V
    // planes packed back to back, so the whole frame can be written in one go).
    uint8_t* yuvData[4] = { NULL, NULL, NULL, NULL };
    int yuvLinesize[4] = { 0, 0, 0, 0 };
    const int yuvBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, imageWidth, imageHeight, 1);
    if (yuvBytes < 0) {
        qDebug() << "av_image_get_buffer_size failed:" << yuvBytes;
        return;
    }
    uint8_t* yuvBuffer = (uint8_t*)av_malloc((size_t)yuvBytes);
    if (!yuvBuffer) {
        qDebug() << "av_malloc failed for" << yuvBytes << "bytes";
        return;
    }
    int ret = av_image_fill_arrays(yuvData, yuvLinesize, yuvBuffer,
                                   AV_PIX_FMT_YUV420P, imageWidth, imageHeight, 1);
    if (ret < 0) {
        qDebug() << "av_image_fill_arrays failed:" << ret;
        av_free(yuvBuffer);
        return;
    }
    qDebug() << "yuv bytes:" << yuvBytes
             << "linesizes:" << yuvLinesize[0] << yuvLinesize[1] << yuvLinesize[2];

    // Color-space converter RGB32 -> YUV420P.
    SwsContext* rgb_to_yuv_ctx = sws_getContext(imageWidth, imageHeight, AV_PIX_FMT_RGB32,
                                                imageWidth, imageHeight, AV_PIX_FMT_YUV420P,
                                                SWS_BICUBIC, NULL, NULL, NULL);
    if (!rgb_to_yuv_ctx) {
        // BUG FIX: the original never checked this and would have passed a
        // NULL context to sws_scale.
        qDebug() << "sws_getContext failed";
        av_free(yuvBuffer);
        return;
    }

    ret = sws_scale(rgb_to_yuv_ctx, rgbData, rgbLinesize, 0, imageHeight,
                    yuvData, yuvLinesize);
    sws_freeContext(rgb_to_yuv_ctx);
    if (ret < 0) {
        qDebug() << "sws_scale failed:" << ret;
        av_free(yuvBuffer);
        return;
    }

    // BUG FIX: the original never checked fopen() and would crash in fwrite()
    // if the path was unavailable. "ab+" = append in binary mode.
    FILE* output = fopen("H:\\common_src\\FFmpeg_QT_rtsp\\h264\\me_0.yuv", "ab+");
    if (!output) {
        qDebug() << "fopen failed for output .yuv file";
        av_free(yuvBuffer);
        return;
    }
    // With align=1 the three planes are contiguous in yuvBuffer, so a single
    // write emits Y, then U, then V -- identical byte stream to writing the
    // planes individually.
    if (fwrite(yuvBuffer, 1, (size_t)yuvBytes, output) != (size_t)yuvBytes) {
        qDebug() << "short write to output .yuv file";
    }
    fclose(output);
    av_free(yuvBuffer);
}