視頻截圖如下
使用一個單獨的線程處理和顯示視頻源的輸入幀和輸出幀,有助於使GUI線程(主線程)保持空閒及可響應性,而用第二線程處理更密集的進程。
準備工作:
創建含兩個label的QT控件應用程序,並在.pro文件中配置opencv:
INCLUDEPATH += /usr/local/include \
/usr/local/include/opencv \
/usr/local/include/opencv2
LIBS += /usr/local/lib64/libopencv*
第一種方法(官方已不推薦的老方法):子類化QThread。 思路如下:
一,創建類繼承QThread: class videoProcessorThread : public QThread
重寫run函數,作爲新線程入口。run函數中1打開視頻,2設置定時器延時讀取視頻幀,3給主框架發送顯示幀圖片消息,4加上this->exec();支持信號槽。
二,主框架MainWindow的構造函數中,1建立顯示圖片的信號槽連接,2啓動線程。析構函數中3結束線程。
代碼如下:
videoprocessorthread.h
#ifndef VIDEOPROCESSORTHREAD_H
#define VIDEOPROCESSORTHREAD_H
#include <QObject>
#include <QThread>
#include <QPixmap>
#include <opencv2/opencv.hpp>
using namespace cv;

// Forward declaration: only a pointer is stored here, and the original header
// never brought QTimer into scope at all (the .cpp includes it, but any other
// consumer of this header would fail to compile).
class QTimer;

// Method 1 (discouraged by Qt docs): subclass QThread and put the work in
// run().  Grabs frames from a video file, inverts them, and emits both the
// original and processed frames as pixmaps for the GUI to display.
class videoProcessorThread : public QThread
{
    Q_OBJECT
public:
    explicit videoProcessorThread(QObject *parent = nullptr);
    // Grab one frame, invert it, emit inDisplay/outDisplay.
    void showCamera();
    // Release the frame timer; safe to call from the GUI thread.
    void stopVideo();
signals:
    // Original frame, ready for display.
    void inDisplay(QPixmap pixmap);
    // Processed (color-inverted) frame, ready for display.
    void outDisplay(QPixmap pixmap);
public slots:
private:
    void run() override;            // thread entry point
    VideoCapture camera;            // video source (opened in run())
    Mat inFrame, outFrame;          // current input / processed frames
    QTimer *timer = nullptr;        // frame pacing timer, created in run();
                                    // initialized so stopVideo() before run() is safe
};
#endif // VIDEOPROCESSORTHREAD_H
videoprocessorthread.cpp
#include "videoprocessorthread.h"
#include <QDebug>
#include "QTimer"
// Nothing to initialize here: the capture device and the timer are both
// created lazily inside run(), in the worker thread.
videoProcessorThread::videoProcessorThread(QObject *parent)
    : QThread(parent)
{
    // intentionally empty
}
void videoProcessorThread::run()
{
camera = VideoCapture("/home/jello/myprojects/images/bike.avi");
double rate = camera.get(CV_CAP_PROP_FPS);
int delay = 1000/rate;
timer = new QTimer();
connect(timer,&QTimer::timeout,this,&videoProcessorThread::showCamera);
if(camera.isOpened())
{
timer->start(delay);
this->exec();
}
}
void videoProcessorThread::showCamera()
{
camera >> inFrame;
bitwise_not(inFrame, outFrame);
emit inDisplay(
QPixmap::fromImage(
QImage(
inFrame.data,
inFrame.cols,
inFrame.rows,
inFrame.step,
QImage::Format_RGB888)
.rgbSwapped()));
emit outDisplay(QPixmap::fromImage(
QImage(
outFrame.data,
outFrame.cols,
outFrame.rows,
outFrame.step,
QImage::Format_RGB888)));
}
// Stop frame delivery.  This is called from the GUI thread while the timer
// lives in the worker thread, so the original's direct stop()+delete was a
// cross-thread access.  deleteLater() is safe from any thread: it posts the
// deletion to the timer's own thread, where its event loop performs it.
void videoProcessorThread::stopVideo()
{
    if (timer)   // guard: run() may never have created the timer, and this
    {            // method may be called more than once
        timer->deleteLater();
        timer = nullptr;
    }
}
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include "videoprocessorthread.h"
#include "videoprocessor.h"
namespace Ui {
class MainWindow;
}

// Main window for method 1: owns the QThread-subclass worker and shows its
// input/output frames in two labels.
class MainWindow : public QMainWindow
{
    Q_OBJECT
public:
    explicit MainWindow(QWidget *parent = nullptr);
    ~MainWindow();
protected:
    // `override` added: catches any signature mismatch with
    // QWidget::closeEvent at compile time instead of silently not overriding.
    void closeEvent(QCloseEvent *event) override;
private:
    Ui::MainWindow *ui;
    videoProcessorThread processor;   // method-1 worker (QThread subclass)
    // VideoProcessor *processor1;
};
#endif // MAINWINDOW_H
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QCloseEvent>
// Wire the worker's frame signals to the two display labels, then start the
// thread.  Pointer-to-member connects replace the original SIGNAL()/SLOT()
// string macros: typos now fail at compile time instead of silently at run
// time.  The signals cross threads, so Qt auto-queues them and setPixmap
// executes in the GUI thread.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    connect(&processor, &videoProcessorThread::inDisplay,
            ui->inVideo, &QLabel::setPixmap);
    connect(&processor, &videoProcessorThread::outDisplay,
            ui->outVideo, &QLabel::setPixmap);
    processor.start();   // launches run() in the worker thread
}
// Shut the worker down before the window closes.  Order matters: stop the
// frame timer first, then ask the worker's event loop to exit, then block
// until run() has actually returned.
void MainWindow::closeEvent(QCloseEvent *event)
{
processor.stopVideo();   // stop/release the frame timer
processor.quit();        // ends this->exec() inside run()
processor.wait();        // join: never destroy the window while run() is live
event->accept();
}
// The worker thread was already stopped and joined in closeEvent(), so only
// the UI remains to be released here.
MainWindow::~MainWindow()
{
delete ui;
}
第二種方法(官方推薦):moveToThread
線程處理類直接繼承QObject
#ifndef VIDEOPROCESSOR_H
#define VIDEOPROCESSOR_H
#include <QObject>
#include <QPixmap>
#include "opencv2/opencv.hpp"
using namespace cv;

// Forward declaration: the original header stored a QTimer* with no QTimer
// declaration in scope at all, which does not compile for header consumers.
class QTimer;

// Method 2 (recommended): a plain QObject worker to be handed to a thread via
// moveToThread().  Its slots then execute in that thread's event loop.
class VideoProcessor : public QObject
{
    Q_OBJECT
public:
    explicit VideoProcessor(QObject *parent = nullptr);
    // Grab one frame, invert it, emit inDisplay/outDisplay.
    void showCamera();
signals:
    void inDisplay(QPixmap pixmap);
    void outDisplay(QPixmap pixmap);
public slots:
    void startVideo();   // opens the source and starts the frame timer
    void stopVideo();    // releases the frame timer
private:
    bool stopped = false;      // initialized — original left it indeterminate
    VideoCapture camera;
    Mat inFrame, outFrame;
    QTimer *timer = nullptr;   // created in startVideo(), in the worker thread;
                               // initialized so stopVideo() before start is safe
};
#endif // VIDEOPROCESSOR_H
構造函數中沒有指定任何父對象,因爲有父對象的對象不能移動到新的線程中。
#include "videoprocessor.h"
#include <QTimer>
// Constructed without a parent by the caller on purpose: an object that has a
// parent cannot be moved to another thread, and this worker exists to be
// handed to moveToThread().
VideoProcessor::VideoProcessor(QObject *parent)
    : QObject(parent)
{
    // intentionally empty — everything is set up in startVideo()
}
// Slot invoked via QThread::started, i.e. already in the worker thread.
// Opens the source and starts a timer that paces frame grabbing at the
// video's native rate.
void VideoProcessor::startVideo()
{
    camera = VideoCapture("/home/jello/myprojects/images/bike.avi");
    if (!camera.isOpened())
        return;   // original still created (and leaked) a timer on failure
    stopped = false;

    // Fall back to ~30 fps when the container reports no FPS — the original
    // divided by zero in that case.
    double rate = camera.get(CV_CAP_PROP_FPS);
    int delay = (rate > 0.0) ? int(1000 / rate) : 33;

    // The timer is created here, in the worker thread, so the timeout signal
    // and the connected showCamera() both run in the worker thread.  Parented
    // to this worker so it is destroyed with it even if stopVideo() is missed.
    timer = new QTimer(this);
    connect(timer, &QTimer::timeout, this, &VideoProcessor::showCamera);
    timer->start(delay);
}
void VideoProcessor::showCamera()
{
camera >> inFrame;
bitwise_not(inFrame, outFrame);
emit inDisplay(QPixmap::fromImage(
QImage(
inFrame.data,
inFrame.cols,
inFrame.rows,
inFrame.step,
QImage::Format_RGB888)));
emit outDisplay(QPixmap::fromImage(
QImage(
outFrame.data,
outFrame.cols,
outFrame.rows,
outFrame.step,
QImage::Format_RGB888)));
}
void VideoProcessor::stopVideo()
{
timer->stop();
delete timer;
}
主框架中創建VideoProcessor的實例,並定義爲指針。
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include <QThread>   // added: the ctor does `new QThread(this)` and calls
                     // thread()->start(); no visible header provided QThread
#include "videoprocessor.h"
namespace Ui {
class MainWindow;
}

// Main window for method 2: owns a parentless VideoProcessor that is moved
// onto a dedicated QThread in the constructor.
class MainWindow : public QMainWindow
{
    Q_OBJECT
public:
    explicit MainWindow(QWidget *parent = nullptr);
    ~MainWindow();
private:
    Ui::MainWindow *ui;
    VideoProcessor *processor1;   // worker object; lives in its own thread
};
#endif // MAINWINDOW_H
不應該直接調用VideoProcessor的startVideo函數,而應該將一個適合的信號連接到它進行調用。線程結束信號連接到deleteLater槽函數。
#include "mainwindow.h"
#include "ui_mainwindow.h"
// Build the worker, move it onto a fresh thread, and wire everything up.
// Key fix: the original wrote SIGNAL(finnished()) — a typo the string-based
// macros only report as a runtime warning, so deleteLater was never connected
// and the worker leaked.  Pointer-to-member connects are compiler-verified,
// making that class of bug impossible.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    // No parent on the worker: a QObject with a parent cannot be moved to
    // another thread.
    processor1 = new VideoProcessor();
    processor1->moveToThread(new QThread(this));
    // Kick the work off from started() rather than calling startVideo()
    // directly, so it executes in the worker thread's event loop.
    connect(processor1->thread(), &QThread::started,
            processor1, &VideoProcessor::startVideo);
    connect(processor1->thread(), &QThread::finished,
            processor1, &VideoProcessor::deleteLater);
    // Cross-thread frame signals: auto-queued, setPixmap runs in GUI thread.
    connect(processor1, &VideoProcessor::inDisplay,
            ui->inVideo, &QLabel::setPixmap);
    connect(processor1, &VideoProcessor::outDisplay,
            ui->outVideo, &QLabel::setPixmap);
    processor1->thread()->start();
}
// Tear the worker down.  NOTE(review): stopVideo() is called here directly
// from the GUI thread while processor1 (and its timer) live in the worker
// thread — a queued invocation (e.g. QMetaObject::invokeMethod) would be the
// thread-safe way to do this; confirm before relying on it in production.
MainWindow::~MainWindow()
{
processor1->stopVideo();        // stop frame delivery (see note above)
processor1->thread()->quit();   // ask the worker's event loop to exit
processor1->thread()->wait();   // join; finished() then triggers deleteLater
delete ui;
}