FFmpeg camera capture + hardware encoding + RTMP push on an embedded device

The board on my desk has a Samsung Cortex-A9 CPU and hardware encoding capability (I did not look closely at which chip provides it). After one to two weeks of effort I managed to hardware-encode to H.264 and push the stream to an RTMP server. The overall implementation is as follows:

1. Use ffmpeg to grab YUV data from the camera;

2. Feed the YUV data to the Samsung hardware encoding API;

3. Push the hardware-encoded data with librtmp.

Enough talk, on to the code. It is a bit rough, so bear with me:
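Before the full listing, here is a minimal control-flow sketch of what the program below does per frame. The step_* helpers are placeholders made up for this sketch only; they stand in for the ffmpeg capture, the NX_VidEnc* hardware encoding and the librtmp sending that appear in main() further down.

#include <stdio.h>

/* Placeholders for the real work in the full program below:
 * capture one YUV frame (av_read_frame + sws_scale), hardware-encode it
 * (NX_VidEncEncodeFrame) and push the H.264 data over RTMP (SendH264Packet). */
static int step_capture_yuv(void) { return 0; }
static int step_hw_encode(void)   { return 0; }
static int step_rtmp_send(void)   { return 0; }

int main(void)
{
    int frames = 0;
    while (frames < 3) {                  /* the real code loops until SIGINT */
        if (step_capture_yuv() < 0) break;
        if (step_hw_encode()   < 0) break;
        if (step_rtmp_send()   < 0) break;
        frames++;
    }
    printf("processed %d frames\n", frames);
    return 0;
}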

#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <signal.h>
#include <stdbool.h> // for the bool field in RTMPMetadata

#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libswscale/swscale.h>

#include <librtmp/rtmp.h>

#include <nx_fourcc.h>
#include <nx_vip.h>   // VIP
#include <nx_dsp.h>  // Display
#include "nx_video_api.h" // Video En/Decoder

#include "sps_decode.h"

#define MAX_FILE_NAME   1024
#define MAX_SEQ_BUF_SIZE  (4*1024)

//Length of the packet header; RTMP_MAX_HEADER_SIZE=18
#define RTMP_HEAD_SIZE   (sizeof(RTMPPacket)+RTMP_MAX_HEADER_SIZE)
//Size of the buffer that stores NAL unit data
#define BUFFER_SIZE 327680
#define MAX_PACKET_BYTES (RTMP_HEAD_SIZE+BUFFER_SIZE)
//Sentinel values used while searching for NAL units
#define GOT_A_NAL_CROSS_BUFFER (BUFFER_SIZE+1)
#define GOT_A_NAL_INCLUDE_A_BUFFER (BUFFER_SIZE+2)
#define NO_MORE_BUFFER_TO_READ (BUFFER_SIZE+3)
enum
{
  VIDEO_CODECID_H264 = 7,
};
/**
 * _NaluUnit
 * Internal structure used to store and pass a NAL unit's type, size and data.
 */
typedef struct _NaluUnit
{
 int type;
 int size;
 unsigned char *data;
}NaluUnit;

typedef struct _RTMPMetadata
{
 // video, must be h264 type
 unsigned int nWidth;
 unsigned int nHeight;
 unsigned int nFrameRate;  // fps
 unsigned int nVideoDataRate; // bps
 unsigned int nSpsLen;
 unsigned char* Sps;
 unsigned int nPpsLen;
 unsigned char* Pps;

// audio, must be aac type
 bool         bHasAudio;
 unsigned int nAudioDatarate;
 unsigned int nAudioSampleRate;
 unsigned int nAudioSampleSize;
 int    nAudioFmt;
 unsigned int nAudioChannels;
 char*        pAudioSpecCfg;
 unsigned int nAudioSpecCfgLen;

} RTMPMetadata,*LPRTMPMetadata;

static void dumpdata( void *data, int len, const char *msg )
{
 int i=0;
 unsigned char *byte = (unsigned char *)data;
 printf("Dump Data : %s", msg);
 for( i=0 ; i<len ; i ++ )
 {
  if( i!=0 && i%16 == 0 ) printf("\n\t");
  printf("%.2x", byte[i] );
  if( i%4 == 3 ) printf(" ");
 }
 printf("\n");
}

//Network byte order (big-endian) serialization helpers
char * put_byte( char *output, uint8_t nVal )   
{   
 output[0] = nVal;   
 return output+1;   
}

char * put_be16(char *output, uint16_t nVal )   
{   
 output[1] = nVal & 0xff;   
 output[0] = nVal >> 8;   
 return output+2;   
}

char * put_be24(char *output,uint32_t nVal )   
{   
 output[2] = nVal & 0xff;   
 output[1] = nVal >> 8;   
 output[0] = nVal >> 16;   
 return output+3;   
}   
char * put_be32(char *output, uint32_t nVal )   
{   
 output[3] = nVal & 0xff;   
 output[2] = nVal >> 8;   
 output[1] = nVal >> 16;   
 output[0] = nVal >> 24;   
 return output+4;   
}   
char *  put_be64( char *output, uint64_t nVal )   
{   
 output=put_be32( output, nVal >> 32 );   
 output=put_be32( output, nVal );   
 return output;   
}

char * put_amf_string( char *c, const char *str )   
{   
 uint16_t len = strlen( str );   
 c=put_be16( c, len );   
 memcpy(c,str,len);   
 return c+len;   
}   
char * put_amf_double( char *c, double d )   
{   
 *c++ = AMF_NUMBER;  /* type: Number */   
 {   
  unsigned char *ci, *co;   
  ci = (unsigned char *)&d;   
  co = (unsigned char *)c;   
  co[0] = ci[7];   
  co[1] = ci[6];   
  co[2] = ci[5];   
  co[3] = ci[4];   
  co[4] = ci[3];   
  co[5] = ci[2];   
  co[6] = ci[1];   
  co[7] = ci[0];   
 }   
 return c+8;   
}
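
The byte-order and AMF helpers above are never called in the rest of this post, but they are the usual building blocks of an onMetaData packet. Here is a tiny illustrative sketch (field name and value picked arbitrarily, and the function is not called anywhere) showing how they fit together with dumpdata(); the AMF_NUMBER constant used by put_amf_double comes from librtmp.

/* Illustration only: serialize one AMF string/number pair ("width" = 320.0)
 * with the helpers above, then hex-dump the result. */
static void amf_helper_demo(void)
{
 char buf[64];
 char *p = buf;
 p = put_amf_string(p, "width");  /* 2-byte big-endian length + characters */
 p = put_amf_double(p, 320.0);    /* AMF_NUMBER marker + 8-byte big-endian double */
 dumpdata(buf, (int)(p - buf), "amf demo: ");
}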
unsigned int g_nStop;

unsigned int  m_nFileBufSize;
unsigned int  nalhead_pos;
RTMP* m_pRtmp; 
RTMPMetadata g_metaData;
char m_pBuff[MAX_PACKET_BYTES];
unsigned char *m_pFileBuf; 
unsigned char *m_pFileBuf_tmp;
unsigned char *m_pFileBuf_tmp_old; //used for realloc

/**
 * Initialize and connect to the server.
 *
 * @param url Address of the corresponding application (webapp) on the server
 *
 * @return 1 on success, 0 on failure
 */
int RTMP264_Connect(const char *url)
{
 nalhead_pos=0;
 m_nFileBufSize=BUFFER_SIZE;
 m_pFileBuf=(unsigned char*)malloc(BUFFER_SIZE);
 m_pFileBuf_tmp=(unsigned char*)malloc(BUFFER_SIZE);

m_pRtmp = RTMP_Alloc();
 RTMP_Init(m_pRtmp);
 /* Set the URL */
 if (RTMP_SetupURL(m_pRtmp,(char*)url) == FALSE)
 {
  RTMP_Free(m_pRtmp);
  return false;
 }
 /* Enable write, i.e. publish the stream; this must be called before connecting, otherwise it has no effect */
 RTMP_EnableWrite(m_pRtmp);
 /* Connect to the server */
 if (RTMP_Connect(m_pRtmp, NULL) == FALSE)
 {
  RTMP_Free(m_pRtmp);
  return false;
 }

/* Connect to the stream */
 if (RTMP_ConnectStream(m_pRtmp,0) == FALSE)
 {
  RTMP_Close(m_pRtmp);
  RTMP_Free(m_pRtmp);
  return false;
 }
 return true; 
}

/**
 * Disconnect and release the related resources.
 */
void RTMP264_Close()
{
 if(m_pRtmp) 
 { 
  RTMP_Close(m_pRtmp); 
  RTMP_Free(m_pRtmp); 
  m_pRtmp = NULL; 
 } 
 if (m_pFileBuf != NULL)
 { 
  free(m_pFileBuf);
 } 
 if (m_pFileBuf_tmp != NULL)
 { 
  free(m_pFileBuf_tmp);
 }
}

/**
 * Send an RTMP packet.
 *
 * @param nPacketType Packet type
 * @param data Buffer holding the packet body
 * @param size Size of the body
 * @param nTimestamp Timestamp of this packet
 *
 * @return 1 on success, a value less than 0 on failure
 */
int SendPacket(unsigned int nPacketType,unsigned char *data,unsigned int size,unsigned int nTimestamp)
{
    if(m_pRtmp == NULL) return -1;
 RTMPPacket *packet;
 /* Allocate and initialize the packet; size is the body length */
 packet = (RTMPPacket *)m_pBuff;
 
 /* Packet body memory */
 packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
 packet->m_nBodySize = size;
 //memcpy(packet->m_body,data,size);
 packet->m_hasAbsTimestamp = 0;
 packet->m_packetType = nPacketType; /* the type is one of two kinds: audio or video */
 packet->m_nInfoField2 = m_pRtmp->m_stream_id;
 packet->m_nChannel = 0x04;

packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
 if (RTMP_PACKET_TYPE_AUDIO ==nPacketType && size !=4)
 {
  packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
 }
 packet->m_nTimeStamp = nTimestamp;
 /* Send */
 int nRet =0;
 if (RTMP_IsConnected(m_pRtmp))
 {
  nRet = RTMP_SendPacket(m_pRtmp,packet,TRUE); /* TRUE: put into the send queue; FALSE: send directly without queueing */
 }

return nRet; 
}

/**
 * Send the video SPS and PPS information.
 *
 * @param pps Buffer holding the video PPS
 * @param pps_len Length of the PPS
 * @param sps Buffer holding the video SPS
 * @param sps_len Length of the SPS
 *
 * @return 1 on success, 0 on failure
 */
int SendVideoSpsPps(unsigned char *pps,int pps_len,unsigned char * sps,int sps_len)
{
    //dumpdata(pps, pps_len, "pps: ");
    //dumpdata(sps, sps_len, "sps: ");
    if(m_pRtmp == NULL)
    {
        return 0;
    }
 RTMPPacket * packet=NULL; // RTMP packet structure
 unsigned char * body=NULL;
 int i;
 packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE+1024);
 //RTMPPacket_Reset(packet); // reset packet state
 memset(packet,0,RTMP_HEAD_SIZE+1024);
 packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
 body = (unsigned char *)packet->m_body;
 i = 0;
 body[i++] = 0x17;
 body[i++] = 0x00;

body[i++] = 0x00;
 body[i++] = 0x00;
 body[i++] = 0x00;

/* AVCDecoderConfigurationRecord */
 body[i++] = 0x01;
 body[i++] = sps[1];
 body[i++] = sps[2];
 body[i++] = sps[3];
 body[i++] = 0xff;

/* sps */
 body[i++]   = 0xe1;
 body[i++] = (sps_len >> 8) & 0xff;
 body[i++] = sps_len & 0xff;
 memcpy(&body[i],sps,sps_len);
 i +=  sps_len;

/* pps */
 body[i++]   = 0x01;
 body[i++] = (pps_len >> 8) & 0xff;
 body[i++] = (pps_len) & 0xff;
 memcpy(&body[i],pps,pps_len);
 i +=  pps_len;

packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
 packet->m_nBodySize = i;
 packet->m_nChannel = 0x04;
 packet->m_nTimeStamp = 0;
 packet->m_hasAbsTimestamp = 0;
 packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
 packet->m_nInfoField2 = m_pRtmp->m_stream_id;

/* Call the send interface */
 int nRet = 0;
 if (RTMP_IsConnected(m_pRtmp))
 {
     nRet = RTMP_SendPacket(m_pRtmp,packet,TRUE);
    }
 free(packet);    // free the packet memory
 return nRet;
}

/**
 * Send one H.264 frame.
 *
 * @param data Buffer holding the frame data
 * @param size Size of the frame
 * @param bIsKeyFrame Whether this frame is a key frame
 * @param nTimeStamp Timestamp of this frame
 *
 * @return 1 on success, 0 on failure
 */
int SendH264Packet(unsigned char *data,unsigned int size,int bIsKeyFrame,unsigned int nTimeStamp)
{
 if(data == NULL && size<11){ 
  return false; 
 }

 unsigned char *body = (unsigned char *)(m_pBuff+RTMP_HEAD_SIZE); 
 
 int i = 0;
 if(bIsKeyFrame){ 
  body[i++] = 0x17;// 1:Iframe  7:AVC  
  body[i++] = 0x01;// AVC NALU  
  body[i++] = 0x00; 
  body[i++] = 0x00; 
  body[i++] = 0x00;

// NALU size  
  body[i++] = size>>24 &0xff; 
  body[i++] = size>>16 &0xff; 
  body[i++] = size>>8 &0xff; 
  body[i++] = size&0xff;
  // NALU data  
  memcpy(&body[i],data,size); 
  SendVideoSpsPps(g_metaData.Pps,g_metaData.nPpsLen,g_metaData.Sps,g_metaData.nSpsLen);
 }else{ 
  body[i++] = 0x27;// 2:Pframe  7:AVC  
  body[i++] = 0x01;// AVC NALU  
  body[i++] = 0x00; 
  body[i++] = 0x00; 
  body[i++] = 0x00;

// NALU size  
  body[i++] = size>>24 &0xff; 
  body[i++] = size>>16 &0xff; 
  body[i++] = size>>8 &0xff; 
  body[i++] = size&0xff;
  // NALU data  
  memcpy(&body[i],data,size); 
 } 
 
 int bRet = SendPacket(RTMP_PACKET_TYPE_VIDEO,body,i+size,nTimeStamp);

return bRet; 
}

/**
 * Read the first NAL unit from the buffer.
 *
 * @param nalu Receives the NAL unit
 * @param read_buffer Callback; when the data runs out it is called automatically to fetch more input.
 *     Its two parameters:
 *     uint8_t *buf: external data is written to this address
 *     int buf_size: size of the external data
 *     Return value: number of bytes actually read
 * @return 1 on success, 0 on failure
 */
int ReadFirstNaluFromBuf(NaluUnit *nalu,int (*read_buffer)(uint8_t *buf, int buf_size))
{
 int naltail_pos=nalhead_pos;
 memset(m_pFileBuf_tmp,0,BUFFER_SIZE);
 while(nalhead_pos<m_nFileBufSize) 
 { 
  //search for nal header
  if(m_pFileBuf[nalhead_pos++] == 0x00 &&
   m_pFileBuf[nalhead_pos++] == 0x00)
  {
   if(m_pFileBuf[nalhead_pos++] == 0x01)
    goto gotnal_head;
   else
   {
    //cuz we have done an i++ before,so we need to roll back now
    nalhead_pos--;  
    if(m_pFileBuf[nalhead_pos++] == 0x00 &&
     m_pFileBuf[nalhead_pos++] == 0x01)
     goto gotnal_head;
    else
     continue;
   }
  }
  else
   continue;

//search for nal tail which is also the head of next nal
gotnal_head:
  //normal case:the whole nal is in this m_pFileBuf
  naltail_pos = nalhead_pos; 
  while (naltail_pos<m_nFileBufSize) 
  { 
   if(m_pFileBuf[naltail_pos++] == 0x00 &&
    m_pFileBuf[naltail_pos++] == 0x00 )
   { 
    if(m_pFileBuf[naltail_pos++] == 0x01)
    {
     nalu->size = (naltail_pos-3)-nalhead_pos;
     break;
    }
    else
    {
     naltail_pos--;
     if(m_pFileBuf[naltail_pos++] == 0x00 &&
      m_pFileBuf[naltail_pos++] == 0x01)
     { 
      nalu->size = (naltail_pos-4)-nalhead_pos;
      break;
     }
    }
   } 
  }

nalu->type = m_pFileBuf[nalhead_pos]&0x1f;
  memcpy(m_pFileBuf_tmp,m_pFileBuf+nalhead_pos,nalu->size);
  nalu->data=m_pFileBuf_tmp;
  nalhead_pos=naltail_pos;
  return TRUE;     
 }
 return FALSE;
}

/**
 * Read one NAL unit from the buffer.
 *
 * @param nalu Receives the NAL unit
 * @param read_buffer Callback; when the data runs out it is called automatically to fetch more input.
 *     Its two parameters:
 *     uint8_t *buf: external data is written to this address
 *     int buf_size: size of the external data
 *     Return value: number of bytes actually read
 * @return 1 on success, 0 on failure
 */
int ReadOneNaluFromBuf(NaluUnit *nalu,int (*read_buffer)(uint8_t *buf, int buf_size)) 
{   
 
 int naltail_pos=nalhead_pos;
 int ret;
 int nalustart; // number of bytes in the NAL start code (3 or 4)
 memset(m_pFileBuf_tmp,0,BUFFER_SIZE);
 nalu->size=0;
 while(1)
 {
  if(nalhead_pos==NO_MORE_BUFFER_TO_READ)
   return FALSE;
  while(naltail_pos<m_nFileBufSize) 
  { 
   //search for nal tail
   if(m_pFileBuf[naltail_pos++] == 0x00 &&
    m_pFileBuf[naltail_pos++] == 0x00)
   {
    if(m_pFileBuf[naltail_pos++] == 0x01)
    { 
     nalustart=3;
     goto gotnal ;
    }
    else
    {
     //cuz we have done an i++ before,so we need to roll back now
     naltail_pos--;  
     if(m_pFileBuf[naltail_pos++] == 0x00 &&
      m_pFileBuf[naltail_pos++] == 0x01)
     {
      nalustart=4;
      goto gotnal;
     }
     else
      continue;
    }
   }
   else
    continue;

gotnal: 
     /**
     *special case 1: part of the nal lies in one m_pFileBuf and we have to read from the buffer
     *again to get the rest of this nal
     */
    if(nalhead_pos==GOT_A_NAL_CROSS_BUFFER || nalhead_pos==GOT_A_NAL_INCLUDE_A_BUFFER)
    {
     nalu->size = nalu->size+naltail_pos-nalustart;
     if(nalu->size>BUFFER_SIZE)
     {
      m_pFileBuf_tmp_old=m_pFileBuf_tmp; // save pointer in case realloc fails
      if((m_pFileBuf_tmp = (unsigned char *)realloc(m_pFileBuf_tmp,nalu->size)) ==  NULL )
      {
       free( m_pFileBuf_tmp_old );  // free original block
       return FALSE;
      }
     }
     memcpy(m_pFileBuf_tmp+nalu->size+nalustart-naltail_pos,m_pFileBuf,naltail_pos-nalustart);
     nalu->data=m_pFileBuf_tmp;
     nalhead_pos=naltail_pos;
     return TRUE;
    }
    //normal case:the whole nal is in this m_pFileBuf
    else
    { 
     nalu->type = m_pFileBuf[nalhead_pos]&0x1f;
     nalu->size=naltail_pos-nalhead_pos-nalustart;
     if(nalu->type==0x06)
     {
      nalhead_pos=naltail_pos;
      continue;
     }
     memcpy(m_pFileBuf_tmp,m_pFileBuf+nalhead_pos,nalu->size);
     nalu->data=m_pFileBuf_tmp;
     nalhead_pos=naltail_pos;
     return TRUE;   
    }      
  }

if(naltail_pos>=m_nFileBufSize && nalhead_pos!=GOT_A_NAL_CROSS_BUFFER && nalhead_pos != GOT_A_NAL_INCLUDE_A_BUFFER)
  {
   nalu->size = BUFFER_SIZE-nalhead_pos;
   nalu->type = m_pFileBuf[nalhead_pos]&0x1f;
   memcpy(m_pFileBuf_tmp,m_pFileBuf+nalhead_pos,nalu->size);
   if((ret=read_buffer(m_pFileBuf,m_nFileBufSize))<BUFFER_SIZE)
   {
    memcpy(m_pFileBuf_tmp+nalu->size,m_pFileBuf,ret);
    nalu->size=nalu->size+ret;
    nalu->data=m_pFileBuf_tmp;
    nalhead_pos=NO_MORE_BUFFER_TO_READ;
    return FALSE;
   }
   naltail_pos=0;
   nalhead_pos=GOT_A_NAL_CROSS_BUFFER;
   continue;
  }
  if(nalhead_pos==GOT_A_NAL_CROSS_BUFFER || nalhead_pos == GOT_A_NAL_INCLUDE_A_BUFFER)
  {
   nalu->size = BUFFER_SIZE+nalu->size;
    
    m_pFileBuf_tmp_old=m_pFileBuf_tmp; // save pointer in case realloc fails
    if((m_pFileBuf_tmp = (unsigned char*)realloc(m_pFileBuf_tmp,nalu->size)) ==  NULL )
    {
     free( m_pFileBuf_tmp_old );  // free original block
     return FALSE;
    }

memcpy(m_pFileBuf_tmp+nalu->size-BUFFER_SIZE,m_pFileBuf,BUFFER_SIZE);
   
   if((ret=read_buffer(m_pFileBuf,m_nFileBufSize))<BUFFER_SIZE)
   {
    memcpy(m_pFileBuf_tmp+nalu->size,m_pFileBuf,ret);
    nalu->size=nalu->size+ret;
    nalu->data=m_pFileBuf_tmp;
    nalhead_pos=NO_MORE_BUFFER_TO_READ;
    return FALSE;
   }
   naltail_pos=0;
   nalhead_pos=GOT_A_NAL_INCLUDE_A_BUFFER;
   continue;
  }
 }
 return FALSE; 
}

/**
 * Send a block of H.264-encoded video data from memory to the server over RTMP.
 *
 * @param read_buffer Callback; when the data runs out it is called automatically to fetch more input.
 *     Its two parameters:
 *     uint8_t *buf: external data is written to this address
 *     int buf_size: size of the external data
 *     Return value: number of bytes actually read
 * @return 1 on success, 0 on failure
 */
int RTMP264_Send(int (*read_buffer)(unsigned char *buf, int buf_size)) 
{   
 int ret;
 uint32_t now,last_update;
  
 memset(&g_metaData,0,sizeof(RTMPMetadata));
 memset(m_pFileBuf,0,BUFFER_SIZE);
 if((ret=read_buffer(m_pFileBuf,m_nFileBufSize))<0)
 {
  return FALSE;
 }

NaluUnit naluUnit; 
 // Read the SPS NAL
 ReadFirstNaluFromBuf(&naluUnit,read_buffer); 
 g_metaData.nSpsLen = naluUnit.size; 
 g_metaData.Sps=(unsigned char*)malloc(naluUnit.size);
 memcpy(g_metaData.Sps,naluUnit.data,naluUnit.size);
    dumpdata(g_metaData.Sps, g_metaData.nSpsLen, "SPS: ");
   
 // Read the PPS NAL
 ReadOneNaluFromBuf(&naluUnit,read_buffer); 
 g_metaData.nPpsLen = naluUnit.size;
 g_metaData.Pps=(unsigned char*)malloc(naluUnit.size);
 memcpy(g_metaData.Pps,naluUnit.data,naluUnit.size);
 dumpdata(g_metaData.Pps, g_metaData.nPpsLen, "PPS: ");
 
 // Decode the SPS to get the video width, height and frame rate
 int width = 0,height = 0, fps=0; 
 h264_decode_sps(g_metaData.Sps,g_metaData.nSpsLen,&width,&height,&fps); 
 printf("00width:%d,height:%d, fps:%d\n", width, height, fps);
 //g_metaData.nWidth = width; 
 //g_metaData.nHeight = height; 
 if(fps)
  g_metaData.nFrameRate = fps;
 else
  g_metaData.nFrameRate = 30;

printf("width:%d,height:%d, fps:%d\n", width, height, fps);
 // Send the PPS and SPS
 //ret=SendVideoSpsPps(g_metaData.Pps,g_metaData.nPpsLen,g_metaData.Sps,g_metaData.nSpsLen);
 //if(ret!=1)
 // return FALSE;

unsigned int tick = 0; 
 unsigned int tick_gap = 1000/g_metaData.nFrameRate;
 ReadOneNaluFromBuf(&naluUnit,read_buffer);
 int bKeyframe  = (naluUnit.type == 0x05) ? TRUE : FALSE;
 while(SendH264Packet(naluUnit.data,naluUnit.size,bKeyframe,tick)) 
 {   
got_sps_pps:
  if(!ReadOneNaluFromBuf(&naluUnit,read_buffer))
    goto end;
  if(naluUnit.type == 0x07 || naluUnit.type == 0x08)
   goto got_sps_pps;
  bKeyframe  = (naluUnit.type == 0x05) ? TRUE : FALSE;
  tick +=tick_gap;
  //msleep(tick_gap-now+last_update); 
  usleep(40*1000); /* about 40 ms between frames */
 } 
 end:
 free(g_metaData.Sps);
 free(g_metaData.Pps);
 return TRUE; 
}

// Parse the SPS and PPS from the hardware encoder's NX_VidEncGetSeqInfo output
int decode_sps_pps(RTMPMetadata* pMeta,unsigned char * buff, int size)
{
    dumpdata(buff, size, "SEQ of H264: ");
   
    pMeta->nSpsLen = 9; 
 pMeta->Sps=(unsigned char*)malloc(pMeta->nSpsLen);
 memcpy(pMeta->Sps,buff+4, pMeta->nSpsLen);
    dumpdata(pMeta->Sps, pMeta->nSpsLen, "SPS: ");
   
    pMeta->nPpsLen = 4; 
 pMeta->Pps=(unsigned char*)malloc(4);
 memcpy(pMeta->Pps,buff+17,4);
 dumpdata(pMeta->Pps, pMeta->nPpsLen, "PPS: ");
 
    return 0;
}
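
Note that decode_sps_pps() above hard-codes the SPS/PPS offsets and lengths for the exact sequence header this encoder produced (see the byte dump in main() further down). As a sketch of a more general approach — assuming the sequence buffer is plain Annex-B with 4-byte start codes and contains exactly one SPS followed by one PPS — the start codes could be located instead of relying on fixed offsets:

/* Sketch only: split an Annex-B sequence header (one SPS, then one PPS,
 * 4-byte 00 00 00 01 start codes) without hard-coded offsets. */
static int find_start_code(unsigned char *buf, int size, int from)
{
 int i;
 for (i = from; i + 3 < size; i++)
  if (buf[i]==0x00 && buf[i+1]==0x00 && buf[i+2]==0x00 && buf[i+3]==0x01)
   return i;
 return -1;
}

static int decode_sps_pps_generic(RTMPMetadata *pMeta, unsigned char *buf, int size)
{
 int sps_start = find_start_code(buf, size, 0);
 int pps_start = (sps_start < 0) ? -1 : find_start_code(buf, size, sps_start + 4);
 if (sps_start < 0 || pps_start < 0)
  return -1;

 pMeta->nSpsLen = pps_start - (sps_start + 4);
 pMeta->Sps = (unsigned char *)malloc(pMeta->nSpsLen);
 memcpy(pMeta->Sps, buf + sps_start + 4, pMeta->nSpsLen);

 pMeta->nPpsLen = size - (pps_start + 4);
 pMeta->Pps = (unsigned char *)malloc(pMeta->nPpsLen);
 memcpy(pMeta->Pps, buf + pps_start + 4, pMeta->nPpsLen);
 return 0;
}

For the dump shown later (00 00 00 01, 9 SPS bytes, 00 00 00 01, 4 PPS bytes) this yields the same 9-byte SPS and 4-byte PPS as the hard-coded version.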

FILE *fp_send1; 
 
// Callback that reads from the file
//we use this callback function to read data from buffer 
int read_buffer1(unsigned char *buf, int buf_size ){ 
    if(!feof(fp_send1)){ 
        int true_size=fread(buf,1,buf_size,fp_send1); 
        return true_size; 
    }else{ 
        return -1; 
    } 
}

void version()
{
    fprintf(stdout, "V1.0\n");
}
void usage(char* app)
{
    fprintf(stdout, "\t-i input device name:/dev/video0\n");
    fprintf(stdout, "\t-y record YUV file name : out.yuv \n");
    fprintf(stdout, "\t-r record H.264 file name: out.h264\n");
    fprintf(stdout, "\t-u url of rtmp: rtmp://192.168.1.102:1935/hls/cam1\n");
    fprintf(stdout, "\t-v show version.\n");
    fprintf(stdout, "\t-h show help.\n");
    fprintf(stdout, "for examples:\t-i /dev/video10 -u rtmp://192.168.1.6/hls/cam1\n");
    fprintf(stdout, "\t\t-i /dev/video10 -y 1.yuv\n");
    fprintf(stdout, "\t\t-i /dev/video10 -r 1.h264\n");
}

void my_exit(int s){ 
    g_nStop = 1;
}

int main(int argc, char* argv[])
{
    g_nStop = 0;
    struct sigaction sigIntHandler; 
    sigIntHandler.sa_handler = my_exit; 
    sigemptyset(&sigIntHandler.sa_mask); 
    sigIntHandler.sa_flags = 0; 
    sigaction(SIGINT, &sigIntHandler, NULL);

FILE *fp_h264 = NULL;
    FILE *fp_yuv = NULL;
   
   
    char input_name[MAX_FILE_NAME+1] = {0};
    char yuv_name[MAX_FILE_NAME+1] = {0};
    char h264_name[MAX_FILE_NAME+1] = {0};
    char rtmp_url[MAX_FILE_NAME+1] = {0};
   
    char video_size[20] = {0};
    strcpy(video_size, "320x240");
    int bit_rate = 200; //kbit
   
    int   opt;
    while( (opt = getopt(argc,argv,"hvi:y:r:u:s:b:")) != -1 )     // getopt returns -1 when parsing is finished
    {
        switch(opt)
        {
            case 'b':
                bit_rate = atol(optarg);
                break;
            case 'h':
                usage(argv[0]);
                return 0;
            case 'v':
                version();
                return 0;
            case 'i':
                strncpy(input_name, optarg, MAX_FILE_NAME);
                break;
            case 'y':
                strncpy(yuv_name, optarg, MAX_FILE_NAME);
                fp_yuv = fopen(yuv_name, "wb+");
                break;
            case 'r':
                strncpy(h264_name, optarg, MAX_FILE_NAME);
                fp_h264 = fopen(h264_name, "wb+");
                break;
            case 'u':
                strncpy(rtmp_url, optarg, MAX_FILE_NAME);
                break;
            case 's':
                strncpy(video_size, optarg, sizeof(video_size)-1);
                break;
        }
    }
   
    int ret = 0;
    av_register_all();
    avcodec_register_all();
    avformat_network_init();
    avdevice_register_all();
   
    if(strlen(rtmp_url)>0)
    {
        ret = RTMP264_Connect(rtmp_url);
        if(!ret)
        {
            printf("RTMP264_Connect fail! %s\n", rtmp_url);
            return -1;
        }
    }
   
    if(strstr(input_name, ".h264")!=NULL)
    {
        fp_send1 = fopen(input_name, "rb");
        RTMP264_Send(read_buffer1);
        return 0;
   }

int videoindex = -1;
   
    AVFormatContext *ifmt_ctx = NULL;
    AVCodecContext  *pCodecCtx;
    AVCodec         *pCodec;
    AVInputFormat *ifmt = NULL; // input format
    AVDictionary *format_opts = NULL;
    av_dict_set(&format_opts,"video_size", video_size, 0);
  
    ifmt_ctx = avformat_alloc_context();
    ifmt = av_find_input_format("video4linux2");
    if(ifmt == NULL)
    {
        printf("Couldn't find video4linux2.\n");
    }
    if(avformat_open_input(&ifmt_ctx, input_name, ifmt, &format_opts) != 0)
    {
        printf("Couldn't open input stream.\n");
        return -1;
    }
    if(avformat_find_stream_info(ifmt_ctx, NULL) <0)
    {
        printf("Couldn't find stream information.\n");
     return -1;
    }

int i;
    printf("stream count=%d .\n", ifmt_ctx->nb_streams);
    for(i=0; i<ifmt_ctx->nb_streams; i++)
    {      
     if(ifmt_ctx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            printf("codec_type:%d\n", ifmt_ctx->streams[i]->codec->codec_type);
            videoindex=i;
            //break;
        }
    }
    if(videoindex == -1)
    {
         printf("Couldn't find a video stream.\n");
         return -1;
    }

pCodecCtx = ifmt_ctx->streams[videoindex]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL)
    {
        printf("Codec not found.\n");
        return -1;
    }
    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
    {
        printf("Could not open codec.\n");
        return -1;
    }
  
   
 int inWidth, inHeight;  // Clipper Output Information
 int frameCnt = 0;
 
 // Encoder Parameters
 NX_VID_ENC_INIT_PARAM encInitParam;
 unsigned char *seqBuffer = (unsigned char *)malloc( MAX_SEQ_BUF_SIZE );
 NX_VID_ENC_HANDLE hEnc;
 NX_VID_ENC_IN encIn;
 NX_VID_ENC_OUT encOut;

long long totalSize = 0;
 double bitRate = 0.;
 long long vipTimeStamp;

int instanceIdx;

// Set Image & Clipper Information
 inWidth = pCodecCtx->width;
 inHeight = pCodecCtx->height;
 
 fprintf(stdout, "inWidth=%d, inHeight=%d\n", inWidth, inHeight);
 
    // Initialize Encoder
 hEnc = NX_VidEncOpen( NX_AVC_ENC,  &instanceIdx);

memset( &encInitParam, 0, sizeof(encInitParam) );
 encInitParam.width = inWidth;
 encInitParam.height = inHeight;
 encInitParam.gopSize = 30/2;
 encInitParam.bitrate = bit_rate*1024;
 encInitParam.fpsNum = 30;
 encInitParam.fpsDen = 1;

encInitParam.chromaInterleave = 0;

// Rate Control
 encInitParam.enableRC = 1;  // Enable Rate Control
 encInitParam.disableSkip = 0; // Enable Skip
 encInitParam.maximumQp = 51; // Max Quantization Scale
 encInitParam.initialQp = 10; // Default Encoder API ( enableRC == 0 )
 encInitParam.enableAUDelimiter = 1; // Enable / Disable AU Delimiter

NX_VID_MEMORY_INFO *hInImage = NULL;
    hInImage = NX_VideoAllocateMemory( 16, inWidth, inHeight, NX_MEM_MAP_LINEAR, FOURCC_MVS0 );

ret = NX_VidEncInit( hEnc, &encInitParam );
 if(ret != 0)
    {
        printf("NX_VidEncInit=%d\n", ret);
        goto end;
    }  
   
   
    int size;
    NX_VidEncGetSeqInfo( hEnc, seqBuffer, &size ); //SPS + PPS
    /*
    00000001 6742401e a680a03d 90000000  0168ce38 80
    SPS: 9 bytes  67 42 40 1e a6 80 a0 3d 90
    PPS: 4 bytes  68 ce 38 80
    */
   
 if(fp_h264 != NULL) fwrite( seqBuffer, 1, size, fp_h264 );
   
    ret = decode_sps_pps(&g_metaData, seqBuffer, size);

ret=SendVideoSpsPps(g_metaData.Pps,g_metaData.nPpsLen,g_metaData.Sps,g_metaData.nSpsLen);

AVFrame *pFrame,*pFrameYUV;
    pFrame=avcodec_alloc_frame();
    pFrameYUV=avcodec_alloc_frame();
    uint8_t *out_buffer=(uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

AVPacket *packet=(AVPacket *)av_malloc(sizeof(AVPacket));
    printf("File Information---------------------\n");
    av_dump_format(ifmt_ctx,0,NULL,0);
    printf("-------------------------------------------------\n");

struct SwsContext *img_convert_ctx;
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

int got_picture;
    AVPacket packetOut;
    unsigned int tick = 0; 
 unsigned int tick_gap = 30;
    while(!g_nStop && av_read_frame(ifmt_ctx, packet)>=0){
     if(packet->stream_index == videoindex){
  ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
  if(ret <0)
  {
      printf("Decode Error.\n");
  }
  if(got_picture)
  {
   sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
   int y_size = pCodecCtx->width*pCodecCtx->height;
   if(fp_yuv != NULL)
      {
          fwrite(pFrameYUV->data[0],1, y_size, fp_yuv); //Y
       fwrite(pFrameYUV->data[1],1, y_size/4, fp_yuv); //U
       fwrite(pFrameYUV->data[2],1, y_size/4, fp_yuv); //V
      }
   memcpy((unsigned char *)hInImage->luVirAddr, pFrameYUV->data[0], y_size);
   memcpy((unsigned char*)hInImage->cbVirAddr, pFrameYUV->data[1], y_size/4);
   memcpy((unsigned char*)hInImage->crVirAddr, pFrameYUV->data[2], y_size/4);
   
   encIn.pImage = hInImage;
            encIn.timeStamp = packet->pts;
            encIn.forcedIFrame = 0;
            encIn.forcedSkipFrame = 0;
            encIn.quantParam = 25;
           
            NX_VidEncEncodeFrame( hEnc, &encIn, &encOut );
            if(encOut.bufSize>0 )
   {
             /* Send */
             //ret = SendH264Packet(encOut.outBuf, encOut.bufSize, encOut.frameType == PIC_TYPE_I, packet->pts);
                ret = SendH264Packet(encOut.outBuf,encOut.bufSize, encOut.frameType == PIC_TYPE_I, tick);
                tick +=tick_gap;
       //printf("encOut.frameType:%d, encOut.bufSize:%ld\n", encOut.frameType, encOut.bufSize);
    if(fp_h264 != NULL) fwrite( encOut.outBuf, 1, encOut.bufSize, fp_h264 );
    //dumpdata( encOut.outBuf, 16, "" );
    totalSize += encOut.bufSize;
    bitRate = (double)totalSize/(double)frameCnt*.8;
    //printf("bitRate = %4.3f kbps\n", bitRate*30/1024.);
   }
   frameCnt ++;
  }
     }
     av_free_packet(packet);
    }
    if( hInImage != NULL )
 {
  NX_FreeVideoMemory( hInImage );
 }
    sws_freeContext(img_convert_ctx);
   
    if(fp_yuv != NULL) fclose(fp_yuv);
    if(fp_h264 != NULL) fclose(fp_h264);

end:
    free(g_metaData.Sps);
 free(g_metaData.Pps);
 
    NX_VidEncClose( hEnc );
    av_free(out_buffer);
    av_free(pFrameYUV);
    avcodec_close(pCodecCtx);
    avformat_close_input(&ifmt_ctx);
    return 0;
}
————————————————
Copyright notice: this is an original article by CSDN blogger "qiek", licensed under CC 4.0 BY-SA. Please include the original source link and this notice when reposting.
Original link: https://blog.csdn.net/qiek/article/details/50974805
