Video Monitoring with live555


1 Code directory

.
├── main.cpp
├── Makefile
├── videoMonitor.cpp
├── videoMonitor.h
├── x264Encoder.cpp
└── x264Encoder.h

2 Code structure

The structure is simple: all of the work is encapsulated in class VideoMonitor, which exposes a small interface to the outside: initialization, starting video monitoring, and stopping video monitoring. Two worker threads are used in total: one reads frames from the camera, encodes them, and writes the result into a FIFO; the other starts live555 and does the actual streaming.

Class diagram and flowchart

3 Main source code

main.cpp

#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>

#include "videoMonitor.h"

VideoMonitor VM;

int main()
{
        VM.init();

        int fun;
        while(1){
                scanf("%d", &fun);

                switch(fun){
                        case 0:
                                if(VM.startMonitor() == 0){
                                        printf("start monitor!\n");
                                }
                                break;
                        case 1:
                                if(VM.stopMonitor() == 0){
                                        printf("stop monitor!\n");
                                }
                                break;
                        default:
                                break;
                }
        }

        return 0;
}


The code is straightforward: it instantiates VideoMonitor and starts or stops monitoring depending on whether 0 or 1 is entered.

videoMonitor.h

#ifndef _VIDEO_MONITOR_H_
#define _VIDEO_MONITOR_H_

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <pthread.h>

//file
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

//opencv
#include <cxcore.h>
#include <highgui.h>
#include <cv.h>

//live555
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <sys/types.h>
#include <sys/stat.h>

#include "x264Encoder.h"

#define FIFO "/tmp/fifo"

class VideoMonitor
{
        public:
                VideoMonitor();
                ~VideoMonitor();
                int init();
                int startMonitor();
                int stopMonitor();
                void Destroy();

        private:
                pthread_t threadID_cam;
                pthread_t threadID_live555;
                static void *thread_cam(void *arg);
                static void *thread_live555(void *arg);

                static CvCapture *cap;
                static int camHigh;
                static int camWidth;
                static RTSPServer* rtspServer;
                static void play();
                static void afterPlaying(void *);
};

#endif

videoMonitor.cpp

#include "videoMonitor.h"

using namespace cv;

int VideoMonitor::camHigh = 240;
int VideoMonitor::camWidth = 240;
CvCapture *VideoMonitor::cap = NULL;
RTSPServer* VideoMonitor::rtspServer = NULL;

UsageEnvironment *env = NULL;
char * ptr = NULL;
H264VideoStreamFramer *videoSource = NULL;
RTPSink *videoSink = NULL;

EventTriggerId DeviceSource::eventTriggerId = 0;

VideoMonitor::VideoMonitor()
{
        // the start/stop logic relies on the thread IDs being 0
        // while the corresponding thread is not running
        threadID_cam = 0;
        threadID_live555 = 0;
}

VideoMonitor::~VideoMonitor()
{}

int VideoMonitor::init()
{
        mkfifo(FIFO, 0777);
        camHigh = 240;
        camWidth = 320;
        return 0;
}

int VideoMonitor::startMonitor()
{
        if(threadID_cam != 0){
                printf("monitor is running !\n");
                return -1;
        }

        if(cap != NULL){
                printf("camera is running !\n");
                return -1;
        }

        cap = cvCreateCameraCapture(-1);
        if(cap == NULL){
                perror("open camera error!\n");
                return -1;
        }

        if(pthread_create(&threadID_cam, NULL, thread_cam, NULL) != 0){
                perror("create thread cam error!\n");
                return -1;
        }

        // run the live555 thread only once
        if(threadID_live555 == 0){
                if(pthread_create(&threadID_live555, NULL, thread_live555, NULL) != 0){
                        perror("create thread live555 error!\n");
                        return -1;
                }
        }
        return 0;
}

int VideoMonitor::stopMonitor()
{
        pthread_cancel(threadID_cam);
        threadID_cam = 0;

        cvReleaseCapture(&cap);
        cap = NULL;

        return 0;
}

void VideoMonitor::Destroy()
{
}

void *VideoMonitor::thread_cam(void *arg)
{
        IplImage *pFrame = NULL;
        cvNamedWindow("result", 1);

        cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 320);
        cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 240);

        x264Encoder x264(camWidth, camHigh, 0, 33);

        int fd = open(FIFO, O_WRONLY|O_CREAT, 0777);
        if(fd < 0){
                printf("open fifo file error!\n");
                return NULL;
        }

        while(true){
                pFrame = cvQueryFrame(cap);
                if(pFrame == NULL) break;

                cvShowImage("result", pFrame);
                Mat mat = cvarrToMat(pFrame);
                int size = x264.EncodeOneFrame(mat);
                unsigned char *data = x264.GetEncodedFrame();
                write(fd, data, size);
        }

        close(fd);
        return NULL;
}

void *VideoMonitor::thread_live555(void *arg)
{
        // Begin by setting up our usage environment:
        TaskScheduler* scheduler = BasicTaskScheduler::createNew();
        env = BasicUsageEnvironment::createNew(*scheduler);

        // Create 'groupsocks' for RTP and RTCP:
        struct in_addr destinationAddress;
        destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
        // Note: This is a multicast address.  If you wish instead to stream
        // using unicast, then you should use the "testOnDemandRTSPServer"
        // test program - not this test program - as a model.

        const unsigned short rtpPortNum = 18888;
        const unsigned short rtcpPortNum = rtpPortNum+1;
        const unsigned char ttl = 255;

        const Port rtpPort(rtpPortNum);
        const Port rtcpPort(rtcpPortNum);

        Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
        rtpGroupsock.multicastSendOnly(); // we're a SSM source
        Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
        rtcpGroupsock.multicastSendOnly(); // we're a SSM source

        // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
        OutPacketBuffer::maxSize = 600000;
        videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

        // Create (and start) a 'RTCP instance' for this RTP sink:
        const unsigned estimatedSessionBandwidth = 10000; // in kbps; for RTCP b/w share
        const unsigned maxCNAMElen = 100;
        unsigned char CNAME[maxCNAMElen+1];
        gethostname((char*)CNAME, maxCNAMElen);
        CNAME[maxCNAMElen] = '\0'; // just in case
        RTCPInstance* rtcp
                = RTCPInstance::createNew(*env, &rtcpGroupsock,
                                estimatedSessionBandwidth, CNAME,
                                videoSink, NULL /* we're a server */,
                                True /* we're a SSM source */);
        // Note: This starts RTCP running automatically

        rtspServer = RTSPServer::createNew(*env, 8554);
        if (rtspServer == NULL) {
                *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
                exit(1);
        }
        ServerMediaSession* sms
                = ServerMediaSession::createNew(*env, "testStream", FIFO,
                                "Session streamed by \"testH264VideoStreamer\"",
                                True /*SSM*/);
        sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
        rtspServer->addServerMediaSession(sms);

        char* url = rtspServer->rtspURL(sms);
        *env << "Play this stream using the URL \"" << url << "\"\n";
        delete[] url;

        // Start the streaming:
        *env << "Beginning streaming...\n";
        play();

        env->taskScheduler().doEventLoop(); // does not return
        return 0;
}

void VideoMonitor::afterPlaying(void* /*clientData*/) {
        *env << "...done reading from file\n";
        videoSink->stopPlaying();
        Medium::close(videoSource);
        // Note that this also closes the input file that this source read from.

        // Start playing once again:
        play();
}

void VideoMonitor::play()
{
        // Open the input file as a 'byte-stream file source':
        ByteStreamFileSource* fileSource
                = ByteStreamFileSource::createNew(*env, FIFO);
        if (fileSource == NULL) {
                *env << "Unable to open file \"" << FIFO
                        << "\" as a byte-stream file source\n";
                exit(1);
        }

        FramedSource* videoES = fileSource;

        // Create a framer for the Video Elementary Stream:
        videoSource = H264VideoStreamFramer::createNew(*env, videoES);

        // Finally, start playing:
        *env << "Beginning to read from file...\n";
        videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

4 Problems encountered

  1. The speed at which the thread_cam thread captures and encodes frames directly affects playback after streaming; stuttering and ghosting can appear.
  2. I tried to terminate the thread_live555 thread, but reading the source showed that RTSPServer's destructor is protected, so the object cannot be released; as a result, starting the thread a second time always failed because the port was still in use.
  3. The streamed video has a delay of roughly 3 seconds.

Solutions:
1. Reduce the capture resolution; with fixed hardware computing power, this raises the producer's (capture/encode) throughput.
2. After reading the source: the RTSPServer thread consumes very few resources and its socket handling uses I/O multiplexing, so live555 is deliberately designed not to be destructed directly.
3. The roughly 3-second delay presumably depends on the hardware performance and on live555's streaming pipeline. The chain capture -> compress -> write FIFO -> read FIFO -> socket send -> VLC playback inevitably costs time, so some delay is hard to avoid; I will profile and tune it once the main work is done (an encoder-side sketch follows this list).
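
One encoder-side knob worth trying for the latency problem is libx264's "zerolatency" tune, which disables lookahead and B-frames so each input frame leaves the encoder immediately. The sketch below is my addition and assumes x264Encoder wraps libx264 directly, which the post does not state:

// Hypothetical low-latency libx264 setup (not the repo code)
#include <x264.h>

static x264_t *openLowLatencyEncoder(int width, int height, int fps)
{
        x264_param_t param;
        // no lookahead, no B-frames: each frame is encoded and returned at once
        x264_param_default_preset(&param, "ultrafast", "zerolatency");
        param.i_width   = width;
        param.i_height  = height;
        param.i_fps_num = fps;
        param.i_fps_den = 1;
        param.b_repeat_headers = 1; // resend SPS/PPS with every keyframe
        param.b_annexb  = 1;        // Annex-B start codes, as H264VideoStreamFramer expects
        x264_param_apply_profile(&param, "baseline");
        return x264_encoder_open(&param);
}

This only removes the encoder's own buffering; the FIFO and the live555/VLC buffering still contribute to the overall delay.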

Links

The complete code is posted on my personal GitHub:
https://github.com/qq930248524/live555.git
Discussion is welcome below; feel free to point out shortcomings in the code.

Known shortcoming: the thread_cam thread is terminated with pthread_cancel, which means its resources are not reclaimed. A better approach is to use a semaphore/mutex-protected flag to notify the child thread to exit on its own, and then join it to reclaim its resources.
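
A minimal sketch of that idea (my addition, not the repo code): replace pthread_cancel with a shared running flag plus pthread_join, so the capture thread leaves its loop on its own and its resources are reclaimed:

#include <pthread.h>

static pthread_mutex_t g_lock = PTHREAD_MUTEX_INITIALIZER;
static bool g_running = false;

static bool keepRunning()
{
        pthread_mutex_lock(&g_lock);
        bool r = g_running;
        pthread_mutex_unlock(&g_lock);
        return r;
}

// thread_cam: loop on the flag instead of while(true), then clean up before returning
//      while(keepRunning()){ ...capture, encode, write(fd, ...)... }
//      close(fd); return NULL;
//
// startMonitor(): set g_running = true (under g_lock) before pthread_create()
//
// stopMonitor(): set g_running = false (under g_lock), then
//      pthread_join(threadID_cam, NULL);  // waits for the thread and reclaims its resources
//      threadID_cam = 0;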
