
Streaming live video with Live555 from an IP camera connected to an H.264 encoder

I am using a custom Texas Instruments OMAP-L138 based board with an ARM9 SoC and a DSP processor, connected to a camera. What I want to do is capture the live H.264-encoded video stream that is sent to the DSP processor, arriving over uPP in packets of 8192 bytes, and stream it over RTSP using the testH264VideoStreamer example that ships with Live555. I have modified the code as shown below:

#include <liveMedia.hh> 
#include <BasicUsageEnvironment.hh> 
#include <GroupsockHelper.hh> 
#include <stdio.h> 
#include <stdint.h> // for uint8_t 
#include <stdlib.h> 
#include <fcntl.h> 
#include <string.h> 
#include <errno.h> 
#include <unistd.h> // for read() 




UsageEnvironment* env; 
H264VideoStreamFramer* videoSource; 
RTPSink* videoSink; 


//------------------------------------------------------------------------------- 
/* File descriptor for the uPP device, opened before main() runs */ 
int stream = open("/dev/upp", O_RDONLY); 
/* Static 8192-byte buffer whose contents persist between invocations of play() */ 
static uint8_t buf[8192]; 
//------------------------------------------------------------------------------ 


//------------------------------------------------------------------------------ 
// Forward declaration of play(), defined below 
//------------------------------------------------------------------------------ 
void play(); 


//------------------------------------------------------------------------------ 
// MAIN FUNCTION/ENTRY POINT 
//------------------------------------------------------------------------------ 
int main(int argc, char** argv) 
{ 
    // Begin by setting up our live555 usage environment: 
    TaskScheduler* scheduler = BasicTaskScheduler::createNew(); 
    env = BasicUsageEnvironment::createNew(*scheduler); 

    // Create 'groupsocks' for RTP and RTCP: 
    struct in_addr destinationAddress; 
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env); 
    // Note: This is a multicast address. If you wish instead to stream 
    // using unicast, then you should use the "testOnDemandRTSPServer" 
    // test program - not this test program - as a model. 

    const unsigned short rtpPortNum = 18888; 
    const unsigned short rtcpPortNum = rtpPortNum+1; 
    const unsigned char ttl = 255; 

    const Port rtpPort(rtpPortNum); 
    const Port rtcpPort(rtcpPortNum); 

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl); 
    rtpGroupsock.multicastSendOnly(); // we're a SSM source 
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl); 
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source 

    // Create a 'H264 Video RTP' sink from the RTP 'groupsock': 
    OutPacketBuffer::maxSize = 1000000; 
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96); 

    // Create (and start) a 'RTCP instance' for this RTP sink: 
    const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share 
    const unsigned maxCNAMElen = 100; 
    unsigned char CNAME[maxCNAMElen+1]; 
    gethostname((char*)CNAME, maxCNAMElen); 
    CNAME[maxCNAMElen] = '\0'; // just in case 
    RTCPInstance* rtcp 
    = RTCPInstance::createNew(*env, &rtcpGroupsock, 
       estimatedSessionBandwidth, CNAME, 
       videoSink, NULL /* we're a server */, 
       True /* we're a SSM source */); 
    // Note: This starts RTCP running automatically 

    /* Create the RTSP server: */ 
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554); 
    if (rtspServer == NULL) 
    { 
     *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n"; 
     exit(1); 
    } 
    ServerMediaSession* sms 
     = ServerMediaSession::createNew(*env, "IPCAM @ TeReSol","UPP Buffer" , 
      "Session streamed by \"testH264VideoStreamer\"", 
         True /*SSM*/); 
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp)); 
    rtspServer->addServerMediaSession(sms); 

    char* url = rtspServer->rtspURL(sms); 
    *env << "Play this stream using the URL \"" << url << "\"\n"; 
    delete[] url; 

    // Start the streaming: 
    *env << "Beginning streaming...\n"; 
    play(); 

    env->taskScheduler().doEventLoop(); // does not return 

    return 0; // only to prevent compiler warning 
} 



//---------------------------------------------------------------------------------- 
// afterPlaying() -> Defines what to do once a buffer is streamed 
//---------------------------------------------------------------------------------- 
void afterPlaying(void* /*clientData*/) 
{ 
    *env << "...done reading from upp buffer\n"; 
    //videoSink->stopPlaying(); 
    //Medium::close(videoSource); 
    // Note that this also closes the input file that this source read from. 

    // Start playing once again to get the next stream  
    play(); 

    /* We don't need to close the device while we keep reading from it. If we did, it would be: close(stream); */ 

} 



//---------------------------------------------------------------------------------------------- 
// play() Method -> Defines how to read and what to make of the input stream 
//---------------------------------------------------------------------------------------------- 
void play() 
{ 



    /* Read up to sizeof(buf) bytes from the uPP file descriptor into buf 
       (the number of bytes actually read is not checked here) */ 
    read(stream, buf, sizeof buf); 
    printf("Reading from UPP into buffer\n"); 

    /* Wrap the buffer as a 'byte-stream' source: */ 
    ByteStreamMemoryBufferSource* buffSource 
     = ByteStreamMemoryBufferSource::createNew(*env, buf, sizeof buf, False /*deleteBufferOnClose*/); 
    /* Passing False means the source will not try to delete our static buffer when it closes */ 

    if (buffSource == NULL) 
    { 
     *env << "Unable to read from\"" << "Buffer" 
      << "\" as a byte-stream source\n"; 
      exit(1); 
    } 

    FramedSource* videoES = buffSource; 
    // Create a framer for the Video Elementary Stream: 
    videoSource = H264VideoStreamFramer::createNew(*env, videoES, False /*includeStartCodeInOutput*/); 
    // Finally, start playing: 
    *env << "Beginning to read from UPP...\n"; 
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink); 
} 

The problem is that although the code compiles successfully, I cannot get the desired output: the RTSP stream shows as playing in VLC, but no video is visible. I would appreciate any help with this. My description may be vague, but I am happy to explain any part of it further.
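One sanity check while debugging is to scan the captured buffer for H.264 Annex-B start codes before handing it to Live555; if SPS (NAL unit type 7) and PPS (type 8) never appear, VLC typically stays in play mode without rendering anything. A minimal sketch of such a check (scanNALUnits is just an illustrative helper), reusing the buf filled by read() above:

#include <stdint.h> 
#include <stdio.h> 

/* Illustrative helper: print the type of every NAL unit found in a 
   captured buffer, by looking for the Annex-B start codes 00 00 01 
   and 00 00 00 01. Types 7 (SPS) and 8 (PPS) must appear somewhere 
   in the stream for players to be able to decode it. */ 
static void scanNALUnits(const uint8_t* data, size_t len) 
{ 
    for (size_t i = 0; i + 4 < len; ++i) 
    { 
        size_t offset = 0; 
        if (data[i] == 0 && data[i+1] == 0 && data[i+2] == 1) 
            offset = i + 3; /* 3-byte start code */ 
        else if (data[i] == 0 && data[i+1] == 0 
                 && data[i+2] == 0 && data[i+3] == 1) 
            offset = i + 4; /* 4-byte start code */ 
        if (offset != 0) 
            printf("NAL unit at offset %zu, type %u\n", 
                   offset, data[offset] & 0x1Fu); 
    } 
} 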

Answer


Okay, I figured out what needed to be done and am writing it up for anyone who may face a similar problem. What I needed to do was modify my testH264VideoStreamer.cpp and DeviceSource.cpp files so that the code reads data directly from the device (in my case the custom AM1808 board), stores it in a buffer, and streams it. The changes I made were:

testH264VideoStreamer.cpp

#include <liveMedia.hh> 
#include <BasicUsageEnvironment.hh> 
#include <GroupsockHelper.hh> 
#include "DeviceSource.hh" // declares the DeviceSource used in play() 
#include <stdio.h> 
#include <stdlib.h> 
#include <fcntl.h> 
#include <string.h> 
#include <errno.h> 
#include <unistd.h> 




UsageEnvironment* env; 

H264VideoStreamFramer* videoSource; 
RTPSink* videoSink; 

void play(); // forward 
//------------------------------------------------------------------------- 
//Entry Point -> Main FUNCTION 
//------------------------------------------------------------------------- 

int main(int argc, char** argv) { 
    // Begin by setting up our usage environment: 
    TaskScheduler* scheduler = BasicTaskScheduler::createNew(); 
    env = BasicUsageEnvironment::createNew(*scheduler); 

    // Create 'groupsocks' for RTP and RTCP: 
    struct in_addr destinationAddress; 
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env); 
    // Note: This is a multicast address. If you wish instead to stream 
    // using unicast, then you should use the "testOnDemandRTSPServer" 
    // test program - not this test program - as a model. 

    const unsigned short rtpPortNum = 18888; 
    const unsigned short rtcpPortNum = rtpPortNum+1; 
    const unsigned char ttl = 255; 

    const Port rtpPort(rtpPortNum); 
    const Port rtcpPort(rtcpPortNum); 

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl); 
    rtpGroupsock.multicastSendOnly(); // we're a SSM source 
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl); 
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source 

    // Create a 'H264 Video RTP' sink from the RTP 'groupsock': 
    OutPacketBuffer::maxSize = 600000; 
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96); 

    // Create (and start) a 'RTCP instance' for this RTP sink: 
    const unsigned estimatedSessionBandwidth = 1024; // in kbps; for RTCP b/w share 
    const unsigned maxCNAMElen = 100; 
    unsigned char CNAME[maxCNAMElen+1]; 
    gethostname((char*)CNAME, maxCNAMElen); 
    CNAME[maxCNAMElen] = '\0'; // just in case 
    RTCPInstance* rtcp 
    = RTCPInstance::createNew(*env, &rtcpGroupsock, 
       estimatedSessionBandwidth, CNAME, 
       videoSink, NULL /* we're a server */, 
       True /* we're a SSM source */); 
    // Note: This starts RTCP running automatically 

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554); 
    if (rtspServer == NULL) { 
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n"; 
    exit(1); 
    } 
    ServerMediaSession* sms 
    = ServerMediaSession::createNew(*env, "ipcamera","UPP Buffer" , 
      "Session streamed by \"testH264VideoStreamer\"", 
         True /*SSM*/); 
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp)); 
    rtspServer->addServerMediaSession(sms); 

    char* url = rtspServer->rtspURL(sms); 
    *env << "Play this stream using the URL \"" << url << "\"\n"; 
    delete[] url; 

    // Start the streaming: 
    *env << "Beginning streaming...\n"; 
    play(); 

    env->taskScheduler().doEventLoop(); // does not return 

    return 0; // only to prevent compiler warning 
} 
//---------------------------------------------------------------------- 
//AFTER PLAY FUNCTION CALLED HERE 
//---------------------------------------------------------------------- 
void afterPlaying(void* /*clientData*/) 
{ 

    play(); 
} 
//------------------------------------------------------------------------ 
//PLAY FUNCTION() 
//------------------------------------------------------------------------ 
void play() 
{ 


     // Create a DeviceSource that reads directly from the device: 
    DeviceSource* devSource 
     = DeviceSource::createNew(*env); 
    if (devSource == NULL) 
    { 

      *env << "Unable to read from\"" << "Buffer" 
      << "\" as a byte-stream source\n"; 
      exit(1); 
    } 

    FramedSource* videoES = devSource; 

    // Create a framer for the Video Elementary Stream: 
    videoSource = H264VideoStreamFramer::createNew(*env, videoES, False /*includeStartCodeInOutput*/); 

    // Finally, start playing: 
    *env << "Beginning to read from UPP...\n"; 
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink); 
} 

DeviceSource.cpp

#include "DeviceSource.hh" 
#include <GroupsockHelper.hh> // for "gettimeofday()" 
#include <stdio.h> 
#include <unistd.h> 
#include <stdlib.h> 
#include <fcntl.h> 
#include <string.h> 
#include <errno.h> 
#include <string.h> 
#include <unistd.h> 

//static uint8_t *buf = (uint8_t*)malloc(102400); 
static uint8_t buf[8192]; // one 8192-byte uPP packet 
int upp_stream;           // file descriptor for /dev/upp 
//static uint8_t *bufPtr = buf; 

DeviceSource* 
DeviceSource::createNew(UsageEnvironment& env) 
{ 
    return new DeviceSource(env); 
} 

EventTriggerId DeviceSource::eventTriggerId = 0; 

unsigned DeviceSource::referenceCount = 0; 

DeviceSource::DeviceSource(UsageEnvironment& env): FramedSource(env) 
{ 
    // Open the uPP device once, when the first instance is created: 
    if (referenceCount == 0) 
    { 
        upp_stream = open("/dev/upp", O_RDWR); 
    } 
    ++referenceCount; 

    // Create the event trigger used to call deliverFrame0() from the event loop: 
    if (eventTriggerId == 0) 
    { 
        eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0); 
    } 
} 

DeviceSource::~DeviceSource(void) { 
    --referenceCount; 
    envir().taskScheduler().deleteEventTrigger(eventTriggerId); 
    eventTriggerId = 0; 

    if (referenceCount == 0) 
    { 
        // Last instance destroyed; device cleanup (e.g. close(upp_stream)) would go here 
    } 
} 

int loop_count; // used only by the commented-out multi-packet experiment below 

void DeviceSource::doGetNextFrame() 
{ 
    // Blocking read of one 8192-byte packet from the uPP driver. 
    // (The commented-out loop was an experiment that read several 
    // packets back-to-back into a larger buffer.) 
    //for (loop_count = 0; loop_count < 13; loop_count++) 
    //{ 
        read(upp_stream, buf, 8192); 
        //bufPtr += 8192; 
    //} 
    deliverFrame(); 
} 

void DeviceSource::deliverFrame0(void* clientData) 
{ 
    ((DeviceSource*)clientData)->deliverFrame(); 
} 

void DeviceSource::deliverFrame() 
{ 


    if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet 

    u_int8_t* newFrameDataStart = (u_int8_t*) buf; 
    unsigned newFrameSize = sizeof(buf); 

    // Deliver the data here: 
    if (newFrameSize > fMaxSize) { 
    fFrameSize = fMaxSize; 
    fNumTruncatedBytes = newFrameSize - fMaxSize; 
    } else { 
    fFrameSize = newFrameSize; 
    } 
    gettimeofday(&fPresentationTime, NULL); 
    memmove(fTo, newFrameDataStart, fFrameSize); 
    FramedSource::afterGetting(this); 
} 
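
The post above does not show DeviceSource.hh. A minimal header consistent with this implementation, adapted from the stock live555 DeviceSource template (a sketch, not necessarily the exact file used), would be:

#ifndef _DEVICE_SOURCE_HH 
#define _DEVICE_SOURCE_HH 

#ifndef _FRAMED_SOURCE_HH 
#include "FramedSource.hh" 
#endif 

class DeviceSource: public FramedSource { 
public: 
    static DeviceSource* createNew(UsageEnvironment& env); 

    // Event trigger created in the constructor; this implementation 
    // delivers frames synchronously from doGetNextFrame(), so the 
    // trigger is only cleaned up in the destructor. 
    static EventTriggerId eventTriggerId; 

protected: 
    DeviceSource(UsageEnvironment& env); 
    virtual ~DeviceSource(); 

private: 
    // Implements the FramedSource interface: 
    virtual void doGetNextFrame(); 

    static void deliverFrame0(void* clientData); 
    void deliverFrame(); 

private: 
    static unsigned referenceCount; // number of live DeviceSource instances 
}; 

#endif 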

After compiling the code with these modifications, I was able to receive the video stream in VLC.
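With the session name "ipcamera" and port 8554 used above, the stream should be reachable at a URL of the form rtsp://<board-ip>:8554/ipcamera; the program prints the exact URL at startup.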
