Calling the Hikvision SDK from Python on Windows to Display a Network Camera Stream in Real Time

1. Install OpenCV 2.4.13
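
To confirm that the matching Python bindings are in place (this assumes the cv2.pyd shipped with OpenCV 2.4.13 has been copied into Python's site-packages folder), a quick check from the Python prompt:

#check that the OpenCV Python bindings match the installed version
import cv2
print(cv2.__version__)   #expected output: 2.4.13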

2. Add the Hikvision SDK library path (the folder containing HCNetSDK.dll) to the system environment variables
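
As an alternative to editing the system-wide variable, the SDK DLL folder can be prepended to PATH from the Python script itself before the wrapped module is imported. A sketch, where D:\HCNetSDK\lib is only a placeholder for the folder that actually contains HCNetSDK.dll, PlayCtrl.dll and the other SDK DLLs:

#prepend the Hikvision SDK DLL folder to PATH before importing the wrapper module
import os
os.environ['PATH'] = r'D:\HCNetSDK\lib;' + os.environ['PATH']   #placeholder path, adjust to your install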

3. Use SWIG to generate the interface files

1. Install SWIG

2. Download the OpenCV-swig interface files

3. Put the files from the interface project's lib folder together with the following three source files

//HKIPcamera.cpp
#include <opencv\cv.h>
#include <opencv\highgui.h>
#include <opencv2\opencv.hpp>
#include <iostream>
#include <list>
#include <time.h>
#include <cstdio>
#include <cstring>
#include <windows.h>
#include "HCNetSDK.h"
//#include "PlayM4.h"
#include "plaympeg4.h"

#define USECOLOR 1

using namespace cv;
using namespace std;

//--------------------------------------------
int iPicNum = 0;//Set channel NO.
LONG nPort = -1;
HWND hWnd = NULL;
CRITICAL_SECTION g_cs_frameList;
list<Mat> g_frameList;
LONG lUserID;
NET_DVR_DEVICEINFO_V30 struDeviceInfo;
HANDLE hThread;
LONG lRealPlayHandle = -1;
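
//Data flow: fRealDataCallBack feeds the raw stream into the PlayM4 decoder,
//DecCBFun receives the decoded YV12 frames, converts them and pushes cv::Mat
//copies onto g_frameList; getframe() (called from Python) returns the newest one.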

//Convert a planar YV12 frame (Y plane, then V plane, then U plane) into a
//packed 3-channel buffer (Y, U, V per pixel), honouring the destination widthStep.
void yv12toYUV(char *outYuv, char *inYv12, int width, int height, int widthStep)
{
    for (int row = 0; row < height; row++)
    {
        int idx = row * widthStep;
        for (int col = 0; col < width; col++)
        {
            int tmp = (row / 2)*(width / 2) + (col / 2);   //index into the quarter-size chroma planes
            unsigned int Y = (unsigned int)inYv12[row*width + col];
            unsigned int U = (unsigned int)inYv12[width*height + width*height / 4 + tmp];
            unsigned int V = (unsigned int)inYv12[width*height + tmp];
            outYuv[idx + col * 3] = Y;
            outYuv[idx + col * 3 + 1] = U;
            outYuv[idx + col * 3 + 2] = V;
        }
    }
}



//Decode callback: video frames arrive as YUV data (YV12), audio as PCM data
void CALLBACK DecCBFun(long nPort, char * pBuf, long nSize, FRAME_INFO * pFrameInfo, long nReserved1, long nReserved2)
{
    long lFrameType = pFrameInfo->nType;

    if (lFrameType == T_YV12)
    {
#if USECOLOR
        //static buffers: allocated once and reused for every frame
        static IplImage* pImgYCrCb = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 3);//packed YUV buffer
        yv12toYUV(pImgYCrCb->imageData, pBuf, pFrameInfo->nWidth, pFrameInfo->nHeight, pImgYCrCb->widthStep);//repack the planar YV12 frame
        static IplImage* pImg = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 3);
        cvCvtColor(pImgYCrCb, pImg, CV_YCrCb2RGB);
#else
        static IplImage* pImg = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 1);
        memcpy(pImg->imageData, pBuf, pFrameInfo->nWidth*pFrameInfo->nHeight);
#endif
        //push a deep copy: pImg is a static buffer that the next callback will overwrite
        EnterCriticalSection(&g_cs_frameList);
        g_frameList.push_back(Mat(pImg, true));
        LeaveCriticalSection(&g_cs_frameList);

        //pBuf still holds the raw YV12 data here; it could be saved with
        //fwrite(pBuf, nSize, 1, videoFile);
    }
    //else if (lFrameType == T_AUDIO16): pBuf holds PCM audio data
}


///Real-time stream data callback
void CALLBACK fRealDataCallBack(LONG lRealHandle, DWORD dwDataType, BYTE *pBuffer, DWORD dwBufSize, void *pUser)
{
    DWORD dRet;
    switch (dwDataType)
    {
    case NET_DVR_SYSHEAD:    //system header
        if (!PlayM4_GetPort(&nPort)) //get an unused port number from the playback library
        {
            break;
        }
        if (dwBufSize > 0)
        {
            if (!PlayM4_OpenStream(nPort, pBuffer, dwBufSize, 1024 * 1024))
            {
                dRet = PlayM4_GetLastError(nPort);
                break;
            }
            //set the decode callback: decode only, no display
            if (!PlayM4_SetDecCallBack(nPort, DecCBFun))
            {
                dRet = PlayM4_GetLastError(nPort);
                break;
            }

            //set the decode callback: decode and display
            //if (!PlayM4_SetDecCallBackEx(nPort,DecCBFun,NULL,NULL))
            //{
            //  dRet=PlayM4_GetLastError(nPort);
            //  break;
            //}

            //start video decoding
            if (!PlayM4_Play(nPort, hWnd))
            {
                dRet = PlayM4_GetLastError(nPort);
                break;
            }

            //start audio decoding (requires a composite stream)
            //          if (!PlayM4_PlaySound(nPort))
            //          {
            //              dRet=PlayM4_GetLastError(nPort);
            //              break;
            //          }       
        }
        break;

    case NET_DVR_STREAMDATA:   //stream data
        if (dwBufSize > 0 && nPort != -1)
        {
            BOOL inData = PlayM4_InputData(nPort, pBuffer, dwBufSize);
            while (!inData)
            {
                Sleep(10);
                inData = PlayM4_InputData(nPort, pBuffer, dwBufSize);
                OutputDebugString(L"PlayM4_InputData failed \n");
            }
        }
        break;
    }
}

void CALLBACK g_ExceptionCallBack(DWORD dwType, LONG lUserID, LONG lHandle, void *pUser)
{
    char tempbuf[256] = { 0 };
    switch (dwType)
    {
    case EXCEPTION_RECONNECT:    //reconnect during preview
        printf("----------reconnect--------%d\n", time(NULL));
        break;
    default:
        break;
    }
}

bool OpenCamera(char* ip, char* usr, char* password)
{
    lUserID = NET_DVR_Login_V30(ip, 8000, usr, password, &struDeviceInfo);
    if (lUserID >= 0)   //NET_DVR_Login_V30 returns -1 on failure, otherwise the user ID
    {
        cout << "Log in success!" << endl;
        return TRUE;
    }
    else
    {
        printf("Login error, %d\n", NET_DVR_GetLastError());
        NET_DVR_Cleanup();
        return FALSE;
    }
}
DWORD WINAPI ReadCamera(LPVOID IpParameter)
{
    //---------------------------------------
    //set the exception message callback
    NET_DVR_SetExceptionCallBack_V30(0, NULL, g_ExceptionCallBack, NULL);

    //---------------------------------------
    //start the live preview and register the raw stream callback
    NET_DVR_CLIENTINFO ClientInfo;
    ClientInfo.lChannel = 1;        //device channel number
    ClientInfo.hPlayWnd = NULL;     //NULL window handle: the device SDK only fetches the stream, no decoding/display
    ClientInfo.lLinkMode = 1;       //Main Stream
    ClientInfo.sMultiCastIP = NULL;

    lRealPlayHandle = NET_DVR_RealPlay_V30(lUserID, &ClientInfo, fRealDataCallBack, NULL, TRUE);  //use the global handle so release() can stop the preview later
    if (lRealPlayHandle < 0)
    {
        printf("NET_DVR_RealPlay_V30 failed! Error number: %d\n", NET_DVR_GetLastError());
        return -1;
    }
    else
        cout << "Stream callback started successfully!" << endl;
    Sleep(INFINITE);    //block this worker thread; the cleanup below is normally never reached
    if (!NET_DVR_StopRealPlay(lRealPlayHandle))
    {
        printf("NET_DVR_StopRealPlay error! Error number: %d\n", NET_DVR_GetLastError());
        return 0;
    }
    NET_DVR_Logout(lUserID);
    NET_DVR_Cleanup();
    return 0;
}


void init(char* ip, char* usr, char* password){
    //---------------------------------------
    //initialize the SDK
    NET_DVR_Init();
    //set the connection timeout and the reconnect interval
    NET_DVR_SetConnectTime(2000, 1);
    NET_DVR_SetReconnect(10000, true);
    OpenCamera(ip, usr, password);
    InitializeCriticalSection(&g_cs_frameList);
    hThread = ::CreateThread(NULL, 0, ReadCamera, NULL, 0, 0);  //worker thread that pulls the stream
}

Mat getframe(){
    Mat frame;
    //wait until the decode callback has produced at least one frame
    EnterCriticalSection(&g_cs_frameList);
    while (g_frameList.empty()){
        LeaveCriticalSection(&g_cs_frameList);
        Sleep(1);
        EnterCriticalSection(&g_cs_frameList);
    }
    //return a copy of the newest frame and drop any older, stale frames
    g_frameList.back().copyTo(frame);
    g_frameList.clear();
    LeaveCriticalSection(&g_cs_frameList);
    return frame;
}

void release(){
    NET_DVR_StopRealPlay(lRealPlayHandle);  //stop the live preview
    NET_DVR_Logout(lUserID);                //log out the user
    NET_DVR_Cleanup();                      //release SDK resources
    ::CloseHandle(hThread);
}
//HKIPcamera.h
#include <opencv2/opencv.hpp>
using namespace cv;

void init(char* ip, char* usr, char* password);
Mat getframe();
void release();
// HKIPcamera.i
/*  SWIG interface: exposes init/getframe/release to Python and pulls in the
 *  opencv-swig mat.i typemaps so the cv::Mat return type can be wrapped. */
%module HKIPcamera
%include <opencv/mat.i>
%cv_mat__instantiate_defaults
%header %{
    /*  Includes the header in the wrapper code */
    #include "HKIPcamera.h"
%}

%include "HKIPcamera.h"

4. From the command line, cd into the folder containing the source files and run the command below to generate HKIPcamera_wrap.cxx (SWIG also generates a HKIPcamera.py module alongside it; both are needed later). Change the OpenCV path to your own.

//change the OpenCV include path below to your own
swig -ID:/opencv/build/include -python -c++ HKIPcamera.i  

5. Modify plaympeg4.h

In plaympeg4.h, insert a space between the "C" and the underscore in extern "C"__declspec(dllexport), i.e. change it to extern "C" __declspec(dllexport); otherwise the build fails with an error.

6. Download the Boost library and add its header and library files to the project

4. Build the dynamic-link library

Refer to the following three posts for building the dynamic-link library; once the module is built, it can be used from Python as sketched after the list below.
Calling C++ functions from Python (SWIG, VS2013, using numpy.i to convert between NumPy and C++ arrays)
Calling the Hikvision SDK from Python on Linux to display a network camera in real time
Real-time capture from a Hikvision camera with OpenCV
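
After the build, the usual SWIG layout applies: the compiled extension (_HKIPcamera.pyd) and the generated HKIPcamera.py go together on the Python path, with the Hikvision SDK DLLs reachable through PATH. The snippet below is a minimal usage sketch rather than code from this post: the camera IP, user name and password are placeholders, and it assumes the opencv-swig wrapper lets np.array() read the Mat returned by getframe().

#test_HKIPcamera.py -- minimal usage sketch; IP, user name and password are placeholders
import numpy as np
import cv2
import HKIPcamera

ip = str('192.168.1.64')     #camera IP address
name = str('admin')          #user name
pw = str('12345')            #password

HKIPcamera.init(ip, name, pw)                 #log in and start the background streaming thread
while True:
    frame = np.array(HKIPcamera.getframe())   #newest decoded frame as a NumPy array
    cv2.imshow('IPCamera', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):     #press q to quit
        break
HKIPcamera.release()                          #stop the preview, log out, clean up
cv2.destroyAllWindows()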
