一.yuv文件準備
找到一個mp4文件,然後用FFmpeg命令將它轉換成yuv文件,這裏使用一個野生動物視頻Wildlife.mp4。
首先要使用ffmpeg查看mp4文件的一些信息,比如編碼器類型、像素格式、分辨率、幀率等。
ffmpeg -i Wildlife.mp4
從上面命令輸出的信息,可以看到像素格式是yuv420p、分辨率是1280x720、幀率是29.97,所以這裏直接轉換成yuv文件即可,不需要添加轉換像素格式的參數。
ffmpeg -i Wildlife.mp4 Wildlife.yuv
轉換完成後,ffplay播放一下。
ffplay -f rawvideo -video_size 1280x720 -i Wildlife.yuv
ok,沒問題。
二.SDL渲染視頻
#include "QHSDLVideoRender.h"
#include <QDebug>
// Constructor: zero-initialize all SDL handles; the actual SDL objects are
// created later in initialRender() (two-phase initialization).
QHSDLVideoRender::QHSDLVideoRender()
: m_screen(nullptr)
, m_render(nullptr)
, m_texture(nullptr)
{
}
// Destructor is intentionally empty: SDL resources are released explicitly via
// destroyRender(), which must be called from the same thread as initialRender().
QHSDLVideoRender::~QHSDLVideoRender()
{
}
/**
 * Initialize the SDL rendering pipeline on an existing native window.
 *
 * @param winID  native window handle (e.g. HWND obtained from QWidget::winId())
 * @param width  video frame width in pixels
 * @param height video frame height in pixels
 *
 * Must be called on the same thread that later calls playYuvData(), otherwise
 * the texture becomes invalid when the window is resized.
 */
void QHSDLVideoRender::initialRender(void *winID, int width, int height)
{
    m_videoWidth = width;
    m_videoHeight = height;
    if (SDL_Init(SDL_INIT_VIDEO) < 0)
    {
        qDebug() << "SDL_Init(): " << SDL_GetError();
        return; // nothing below can work without the video subsystem
    }
    // Wrap the foreign (Qt-created) window instead of creating a new one.
    m_screen = SDL_CreateWindowFrom(winID);
    if (!m_screen)
    {
        qDebug() << "SDL_CreateWindowFrom(): " << SDL_GetError();
        return; // renderer/texture creation would dereference a null window
    }
    m_render = SDL_CreateRenderer(m_screen, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
    if (!m_render)
    {
        qDebug() << "Failed to initialize a hardware accelerated renderer: " << SDL_GetError();
        // Fall back to whatever renderer SDL can provide (software).
        m_render = SDL_CreateRenderer(m_screen, -1, 0);
        if (!m_render)
        {
            qDebug() << "SDL_CreateRenderer(): " << SDL_GetError();
            return;
        }
    }
    // IYUV == planar YUV420 (I420), matching the raw .yuv file layout.
    m_texture = SDL_CreateTexture(m_render, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, width, height);
    if (!m_texture)
    {
        qDebug() << "SDL_CreateTexture(): " << SDL_GetError();
        return;
    }
    // Re-show the window so the image appears again after a destroy/re-init cycle.
    SDL_ShowWindow(m_screen);
}
/**
 * Release all SDL resources created by initialRender().
 *
 * Resources are destroyed in reverse creation order: texture -> renderer ->
 * window. The original code destroyed the renderer first; since
 * SDL_DestroyRenderer() also frees all textures belonging to that renderer,
 * the subsequent SDL_DestroyTexture() was a use-after-free.
 */
void QHSDLVideoRender::destroyRender()
{
    if (m_texture)
    {
        SDL_DestroyTexture(m_texture);
        m_texture = nullptr;
    }
    if (m_render)
    {
        SDL_DestroyRenderer(m_render);
        m_render = nullptr;
    }
    if (m_screen)
    {
        SDL_DestroyWindow(m_screen);
        m_screen = nullptr;
    }
    SDL_Quit();
}
/**
 * Upload one I420 frame and render it letter-boxed into the window.
 *
 * @param buffer     pointer to a contiguous Y+U+V (YUV420P) frame
 * @param bufferSize size of the frame in bytes (unused by SDL_UpdateTexture,
 *                   which derives plane sizes from the texture dimensions)
 */
void QHSDLVideoRender::playYuvData(unsigned char *buffer, int bufferSize)
{
    // Guard: initialRender() may have failed, leaving null handles.
    if (!m_screen || !m_render || !m_texture || !buffer)
    {
        return;
    }
    int srcWidth = m_videoWidth;
    int srcHeight = m_videoHeight;
    SDL_Rect srcRect = {0, 0, srcWidth, srcHeight};
    int dstWidth = 0;
    int dstHeight = 0;
    SDL_GetWindowSize(m_screen, &dstWidth, &dstHeight);
    // Avoid divide-by-zero when the window is minimized / has no size yet.
    if (dstWidth <= 0 || dstHeight <= 0 || srcWidth <= 0 || srcHeight <= 0)
    {
        return;
    }
    // Compute a destination rect that preserves the source aspect ratio
    // (letter-box: black bars top/bottom or left/right).
    SDL_Rect dstRect;
    double srcRatio = srcWidth * 1.0 / srcHeight;
    double dstRatio = dstWidth * 1.0 / dstHeight;
    if (srcRatio > dstRatio)
    {
        // Source is wider: fit to window width, center vertically.
        dstRect.x = 0;
        dstRect.y = (dstHeight - dstWidth / srcRatio) / 2;
        dstRect.w = dstWidth;
        dstRect.h = dstWidth / srcRatio;
    }
    else
    {
        // Source is taller: fit to window height, center horizontally.
        dstRect.x = (dstWidth - dstHeight * srcRatio) / 2;
        dstRect.y = 0;
        dstRect.w = dstHeight * srcRatio;
        dstRect.h = dstHeight;
    }
    // Pitch of the Y plane equals the frame width for tightly packed I420 data.
    int ret = SDL_UpdateTexture(m_texture, nullptr, buffer, srcWidth);
    if (ret != 0)
    {
        qDebug() << "SDL_UpdateTexture(): " << SDL_GetError();
        return;
    }
    SDL_RenderClear(m_render);
    SDL_RenderCopy(m_render, m_texture, &srcRect, &dstRect);
    SDL_RenderPresent(m_render);
}
#include "QHSDLVideoRender.h"
#include "QHD3DVideoRender.h"
#include <QDebug>
// Constructor: set up the UI, pick a render backend and launch the file-reading
// thread. initialRender() is deliberately NOT called here — it must run on the
// same thread as playYuvData() (see note 1 below the listing), so it is called
// inside videoReadThread().
QHVideoPlayer::QHVideoPlayer(QWidget *parent) :
QWidget(parent),
ui(new Ui::QHVideoPlayer)
{
ui->setupUi(this);
// Stop Qt from repainting the widget SDL draws into, to avoid flicker/overdraw.
enableUpdate(false);
m_fileName="Wildlife.yuv";
// Native handle of the child widget (not the main window — see note 4).
m_winID=(void *)ui->widget->winId();
m_videoRender=new QHSDLVideoRender();
// m_videoRender=new QHD3DVideoRender();
// NOTE(review): the thread handle is discarded, so it cannot be joined later
// with SDL_WaitThread — TODO confirm intended thread lifecycle.
SDL_CreateThread(videoReadThread,"VideoRead",this);
}
/**
 * Destructor: tear down the render backend before the UI, and free the
 * renderer object (the original leaked m_videoRender and deleted the UI
 * before touching the renderer).
 *
 * NOTE(review): the read thread may still be running and also calls
 * destroyRender(); ideally it should be signalled to stop and joined
 * (SDL_WaitThread) before teardown — TODO confirm thread lifecycle.
 */
QHVideoPlayer::~QHVideoPlayer()
{
    if (m_videoRender)
    {
        m_videoRender->destroyRender();
        delete m_videoRender; // fix: was never freed
        m_videoRender = nullptr;
    }
    delete ui;
}
// Enable/disable Qt repaints on the video widget. Disabled while SDL is
// rendering so Qt does not paint over the SDL output; re-enabled (with false
// at thread exit — i.e. kept disabled) when playback stops.
void QHVideoPlayer::enableUpdate(bool enable)
{
ui->widget->setUpdatesEnabled(enable);
}
/**
 * SDL thread entry: initializes the renderer, then streams raw I420 frames
 * from the .yuv file at ~29.97 fps until EOF.
 *
 * @param userData the owning QHVideoPlayer instance
 * @return 0 on normal completion, -1 if the file could not be opened
 *
 * Fixes vs. original: on fopen failure the original kept going and called
 * fread() on a null FILE* (UB); the frame buffer was never freed and the
 * file never closed.
 */
int QHVideoPlayer::videoReadThread(void *userData)
{
    QHVideoPlayer *player = (QHVideoPlayer *)userData;
    // Must run here, on the render thread (see note 1 below the listing).
    player->m_videoRender->initialRender(player->m_winID, VIDEO_WIDTH, VIDEO_HEIGHT);
    FILE *pFile = fopen(player->m_fileName.toStdString().c_str(), "rb");
    if (pFile == nullptr)
    {
        qDebug() << "fopen failed";
        player->m_videoRender->destroyRender();
        return -1;
    }
    // One I420 frame: Y plane + quarter-size U and V planes.
    int bufferSize = VIDEO_WIDTH * VIDEO_HEIGHT + VIDEO_WIDTH * VIDEO_HEIGHT / 4 + VIDEO_WIDTH * VIDEO_HEIGHT / 4;
    uint8_t *buffer = (uint8_t *)malloc(bufferSize);
    if (buffer != nullptr)
    {
        while (true)
        {
            // A short read means EOF (or a truncated trailing frame).
            if (fread(buffer, 1, bufferSize, pFile) != (size_t)bufferSize)
            {
                qDebug() << "end of file";
                break;
            }
            player->m_videoRender->playYuvData(buffer, bufferSize);
            // Pace playback to the source frame rate (~33 ms per frame).
            SDL_Delay(1000 / 29.97);
        }
        free(buffer); // fix: was leaked
    }
    fclose(pFile); // fix: was never closed
    player->m_videoRender->destroyRender();
    player->enableUpdate(false);
    return 0;
}
流程很簡單,但有幾個細節需要注意下:
1.initialRender和playYuvData的調用必須在同一個線程中,否則窗口縮放的時候會提示Texture無效。
2.windows上可以通過hwnd直接獲取到窗口尺寸SDL_GetWindowSize(m_screen, &dstWidth, &dstHeight),但是linux下通過此方法無法獲取。linux下需要在窗口大小變化時主動調用SDL_SetWindowSize(m_screen, width, height)來告知SDL。
3.mac下調用SDL_CreateWindowFrom直接崩潰,所以mac下是用QOpenGLWidget來渲染的。
4.傳入的hwnd不能是主窗口的,否則縮放時,標題欄和邊框都會被刷掉。
5.initialRender最後調用了SDL_ShowWindow(m_screen),防止SDL釋放後再初始化時無法顯示圖像。
三.D3D渲染視頻
#include "QHD3DVideoRender.h"
#include <QDebug>
// Constructor: zero-initialize all D3D9 COM pointers; the actual objects are
// created later in initialRender() (two-phase initialization).
QHD3DVideoRender::QHD3DVideoRender()
: m_d3d(nullptr)
, m_d3dDevice(nullptr)
, m_d3dSurface(nullptr)
, m_backBufferSurface(nullptr)
{
}
// Destructor is intentionally empty: D3D resources are released explicitly
// via destroyRender().
QHD3DVideoRender::~QHD3DVideoRender()
{
}
/**
 * Create the Direct3D9 device and the off-screen YV12 surface used for
 * uploading frames.
 *
 * @param winID  native window handle (HWND)
 * @param width  video frame width in pixels
 * @param height video frame height in pixels
 *
 * Fixes vs. original: each failure now returns immediately; the original
 * continued and dereferenced null COM pointers (e.g. m_d3d after a failed
 * Direct3DCreate9).
 */
void QHD3DVideoRender::initialRender(void *winID, int width, int height)
{
    m_videoWidth = width;
    m_videoHeight = height;
    m_d3d = Direct3DCreate9(D3D_SDK_VERSION);
    if (m_d3d == nullptr)
    {
        qDebug() << "Direct3DCreate9 failed";
        return; // the calls below would dereference a null interface
    }
    D3DDISPLAYMODE d3dDisplayMode;
    m_d3d->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &d3dDisplayMode);
    D3DPRESENT_PARAMETERS d3dpp;
    ZeroMemory(&d3dpp, sizeof(d3dpp));
    d3dpp.Windowed = TRUE;
    d3dpp.hDeviceWindow = (HWND)winID;
    d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
    d3dpp.BackBufferFormat = d3dDisplayMode.Format;
    // Size the back buffer to the full display so upscaling the window never
    // exceeds the back buffer and causes blockiness (see note below listing).
    d3dpp.BackBufferWidth = d3dDisplayMode.Width;
    d3dpp.BackBufferHeight = d3dDisplayMode.Height;
    d3dpp.EnableAutoDepthStencil = FALSE;
    d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
    d3dpp.FullScreen_RefreshRateInHz = D3DPRESENT_RATE_DEFAULT;
    HRESULT ret = m_d3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, (HWND)winID, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &m_d3dDevice);
    if (FAILED(ret))
    {
        qDebug() << "CreateDevice failed";
        return;
    }
    // Off-screen plain surface in YV12 — the GPU converts YUV->RGB during
    // StretchRect, so no manual color conversion is needed.
    ret = m_d3dDevice->CreateOffscreenPlainSurface(m_videoWidth, m_videoHeight, (D3DFORMAT)MAKEFOURCC('Y', 'V', '1', '2'), D3DPOOL_DEFAULT, &m_d3dSurface, nullptr);
    if (FAILED(ret))
    {
        qDebug() << "CreateOffscreenPlainSurface failed";
        return;
    }
    m_d3dDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &m_backBufferSurface);
    if (!m_backBufferSurface)
    {
        qDebug() << "GetBackBuffer failed";
    }
}
/**
 * Release all D3D9 COM objects created by initialRender().
 *
 * Fixed release order: surfaces first, then the device, then the IDirect3D9
 * factory — the reverse of creation. The original released m_d3d first,
 * while the device and surfaces it created were still alive.
 */
void QHD3DVideoRender::destroyRender()
{
    if (m_backBufferSurface)
    {
        m_backBufferSurface->Release();
        m_backBufferSurface = nullptr;
    }
    if (m_d3dSurface)
    {
        m_d3dSurface->Release();
        m_d3dSurface = nullptr;
    }
    if (m_d3dDevice)
    {
        m_d3dDevice->Release();
        m_d3dDevice = nullptr;
    }
    if (m_d3d)
    {
        m_d3d->Release();
        m_d3d = nullptr;
    }
}
/**
 * Upload one I420 (YUV420P) frame into the YV12 surface and present it.
 *
 * @param buffer     pointer to a contiguous Y+U+V frame
 * @param bufferSize size of the frame in bytes (not used; plane sizes derive
 *                   from m_videoWidth/m_videoHeight)
 *
 * Fix vs. original: when LockRect fails (e.g. D3DLOCK_DONOTWAIT while the GPU
 * is busy) the original continued and read uninitialized lockRect.pBits.
 */
void QHD3DVideoRender::playYuvData(unsigned char *buffer, int bufferSize)
{
    if (m_d3dDevice && m_d3dSurface)
    {
        D3DLOCKED_RECT lockRect;
        HRESULT ret = m_d3dSurface->LockRect(&lockRect, nullptr, D3DLOCK_DONOTWAIT);
        if (FAILED(ret))
        {
            qDebug() << "LockRect failed";
            return; // lockRect.pBits is not valid — skip this frame
        }
        BYTE *p = (BYTE *)lockRect.pBits;
        int stride = lockRect.Pitch;
        // Copy plane by plane, honoring the surface pitch (may exceed width).
        // Source is I420 (Y, U, V); surface is YV12 (Y, V, U), so the two
        // chroma planes are intentionally swapped below.
        int i = 0;
        for (i = 0; i < m_videoHeight; i++)
            memcpy(p + i * stride, buffer + i * m_videoWidth, m_videoWidth);
        // Surface V plane <- source V plane (starts after Y and U in I420).
        for (i = 0; i < m_videoHeight / 2; i++)
            memcpy(p + stride * m_videoHeight + i * stride / 2, buffer + m_videoWidth * m_videoHeight + m_videoWidth * m_videoHeight / 4 + i * m_videoWidth / 2, m_videoWidth / 2);
        // Surface U plane <- source U plane (starts right after Y in I420).
        for (i = 0; i < m_videoHeight / 2; i++)
            memcpy(p + stride * m_videoHeight + stride * m_videoHeight / 4 + i * stride / 2, buffer + m_videoWidth * m_videoHeight + i * m_videoWidth / 2, m_videoWidth / 2);
        ret = m_d3dSurface->UnlockRect();
        if (FAILED(ret))
        {
            qDebug() << "UnlockRect failed";
        }
        m_d3dDevice->Clear(0, nullptr, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
        if (SUCCEEDED(m_d3dDevice->BeginScene()))
        {
            // nullptr dest rect = stretch to fill the whole back buffer;
            // the GPU performs YUV->RGB conversion and linear filtering here.
            m_d3dDevice->StretchRect(m_d3dSurface, nullptr, m_backBufferSurface, nullptr, D3DTEXF_LINEAR);
            m_d3dDevice->EndScene();
        }
        m_d3dDevice->Present(nullptr, nullptr, nullptr, nullptr);
    }
}
除了用surface方式渲染,還可以通過紋理渲染,這位博主總結的比較到位:https://www.jianshu.com/p/6e0980a88c4c
有同學反饋surface方式渲染出來的視頻清晰度沒有本來的視頻清晰度高,特別是放大後文字出現馬賽克現象。
原因1.backBufferSurface的寬高沒指定,或指定的不夠大,即下面這兩句:
d3dpp.BackBufferWidth = d3dDisplayMode.Width;
d3dpp.BackBufferHeight = d3dDisplayMode.Height;
原因2.StretchRect的第四個參數,也就是RECT* pDestRect可以設置爲nullptr,設置爲nullptr表示全屏拉伸填充渲染,即下面這句:
m_d3dDevice->StretchRect(m_d3dSurface, nullptr, m_backBufferSurface, nullptr, D3DTEXF_LINEAR);
源碼鏈接:https://download.csdn.net/download/caoshangpa/14075645
原創不易,轉載請標明出處:https://blog.csdn.net/caoshangpa/article/details/112395101