
I am trying to send an audio stream from point A to a Kurento media server and receive that audio stream at point B using GStreamer. What I want to achieve looks like this: listen to an audio stream coming out of the Kurento media server with GStreamer.

(POINT A) ---- send audio stream with GSTREAMER ---> (KURENTO) ---- AUDIO STREAM -----> (POINT B) ---- receive audio with GSTREAMER ----!

The code I have written so far is below:

// assumes: var transform = require('sdp-transform');
//          var exec = require('child_process').exec;
function createOutGoingAudioStream() {

    var sdpOffer = "v=0\r\n"
        + "o=- 0 0 IN IP4 0.0.0.0\r\n"
        + "c=IN IP4 0.0.0.0\r\n"
        + "t=0 0\r\n"
        + "m=audio 5005 RTP/AVP 0\r\n"
        + "a=rtpmap:0 PCMU/8000\r\n";

    var pipeline;
    console.log();
    console.log("Starting Audio Stream from Command Post.....");

    // get kurento client
    getKurentoClient(function(error, kurentoClient) {
        if (error) {
            return callback(error);
        }

        // create media pipeline
        kurentoClient.create('MediaPipeline', function(error, pipeline) {
            if (error) {
                return callback(error);
            }

            // create first rtpEndpoint for the incoming audio stream
            pipeline.create('RtpEndpoint', function(error, rtpEndpoint) {
                if (error) {
                    pipeline.release();
                    return callback(error);
                }
                console.log('audio RTP Endpoint created successfully!');

                rtpEndpoint.processOffer(sdpOffer, function(error, sdpAnswer) {
                    if (error) {
                        pipeline.release();
                        return callback(error);
                    }
                    console.log(sdpAnswer);
                    console.log();

                    // Start a gstreamer audio stream over the audio port that we got from the kurento server
                    var jsonSdpAnswer = transform.parse(sdpAnswer);
                    var port = jsonSdpAnswer.media[0].port;

                    console.log("Starting audio stream to the kurento server: ");
                    console.log('sh gstreamer.sh ' + port + ' > log.txt');

                    exec('sh gstreamer.sh ' + port + ' > log.txt', function(err, stdout, stderr) {
                        if (err) {
                            console.error(err);
                            return;
                        }
                        // if all is ok nothing will be printed to the console
                        console.log(stdout);
                    });
                });

                // create second rtpEndpoint for the outgoing audio stream to the odroid
                pipeline.create('RtpEndpoint', function(error, outRtpEndpoint) {
                    if (error) {
                        pipeline.release();
                        return callback(error);
                    }
                    console.log('second RTP Endpoint created successfully!');

                    rtpEndpoint.connect(outRtpEndpoint, function(error) {
                        if (error) return onError(error);
                    });
                    outRtpEndpoint.generateOffer(function(error, offerSdp) {
                        if (error) return onError(error);
                        console.log("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@");
                        console.log(offerSdp);
                    });
                });
            });
        });
    });
}
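
The gstreamer.sh script is not shown in the post. A minimal sketch of what it could look like, assuming it pushes a PCMU (mu-law, 8 kHz) test tone over RTP to the Kurento host and takes the negotiated port as its first argument (KURENTO_IP is a placeholder, not something from the original post):

#!/bin/sh
# Hypothetical sender: stream PCMU audio over RTP to Kurento's negotiated port ($1).
# Replace KURENTO_IP with the address of the Kurento media server.
gst-launch-1.0 audiotestsrc is-live=true \
    ! audioconvert ! audioresample \
    ! audio/x-raw,rate=8000,channels=1 \
    ! mulawenc ! rtppcmupay pt=0 \
    ! udpsink host=KURENTO_IP port=$1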

The sdpOffer I get from the kurento server's outRtpEndpoint looks like this:

sdp Offer

Never mind that; what I am trying to do is listen to that stream, and it just does not want to work. What am I doing wrong?

I would really appreciate any help.

Thanks!

Answer


I was able to solve this. At point A I take the source audio stream from the browser (webrtcEndpoint), connect that endpoint to an rtpEndpoint, and from there the stream goes to point B: (Point A - web browser -> webrtcEndpoint) -> (Kurento -> rtpEndpoint) -> (Point B - ffplay).

function createOutGoingAudioStream(sessionId, ws, sdpOffer, callback) {
    if (!sessionId) {
        return callback('Cannot use undefined sessionId');
    }

    getKurentoClient(function(error, kurentoClient) {
        if (error) {
            return callback(error);
        }

        kurentoClient.create('MediaPipeline', function(error, pipeline) {
            if (error) {
                return callback(error);
            }

            createMediaElements(pipeline, ws, function(error, webRtcEndpoint) {
                if (error) {
                    pipeline.release();
                    return callback(error);
                }

                if (candidatesQueue[sessionId]) {
                    while (candidatesQueue[sessionId].length) {
                        var candidate = candidatesQueue[sessionId].shift();
                        webRtcEndpoint.addIceCandidate(candidate);
                    }
                }

                connectMediaElements(webRtcEndpoint, function(error) {
                    if (error) {
                        pipeline.release();
                        return callback(error);
                    }

                    webRtcEndpoint.on('OnIceCandidate', function(event) {
                        var candidate = kurento.getComplexType('IceCandidate')(event.candidate);
                        ws.send(JSON.stringify({
                            id : 'iceCandidate',
                            candidate : candidate
                        }));
                    });

                    webRtcEndpoint.processOffer(sdpOffer, function(error, sdpAnswer) {
                        if (error) {
                            pipeline.release();
                            return callback(error);
                        }

                        sessions[sessionId] = {
                            'pipeline' : pipeline,
                            'webRtcEndpoint' : webRtcEndpoint
                        };
                        return callback(null, sdpAnswer);
                    });

                    webRtcEndpoint.gatherCandidates(function(error) {
                        if (error) {
                            return callback(error);
                        }

                        var sdp = 'v=0';
                        sdp += '\nc=IN IP4 IP_WHERE_YOU_WANT_TO_GET_THE_STREAM'; // this is the ip kurento should stream to
                        sdp += '\nm=audio 8080 RTP/AVP 0';   // at port 8080 you will have an audio stream
                        sdp += '\na=rtpmap:0 PCMU/8000';
                        sdp += '\nm=video 9090 RTP/AVP 101'; // at port 9090 you will have a video stream
                        sdp += '\na=rtpmap:101 H264/90000';

                        pipeline.create('RtpEndpoint', function(err, rtpEndpoint) {
                            if (err) {
                                return callback(err);
                            }

                            rtpEndpoint.processOffer(sdp, function(error, sdpAnswer) {
                                if (error) {
                                    return callback(error);
                                }

                                console.log("################################################");
                                console.log(sdpAnswer);
                                console.log("################################################");
                            });

                            webRtcEndpoint.connect(rtpEndpoint, function(err) {
                                if (err) {
                                    console.log("Error!");
                                }
                            });
                        });
                    });
                });
            });
        });
    });
}

On the machine you are streaming to, you can listen to the stream with:

ffplay rtp://IP_FROM_THE_SDP_OFFER_IN_THE_CODE_ABOVE:AUDIO_PORT_FROM_THE_SDP_OFFER_FROM_THE_CODE_ABOVE
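
If you would rather receive at point B with GStreamer instead of ffplay (as in the original question), a sketch that matches the audio m-line of the SDP built in the code above (PCMU/8000 on port 8080) could be:

# Hypothetical receiver: depayload and play the PCMU RTP stream sent by Kurento.
gst-launch-1.0 udpsrc port=8080 \
    caps="application/x-rtp,media=audio,clock-rate=8000,encoding-name=PCMU,payload=0" \
    ! rtppcmudepay ! mulawdec ! audioconvert ! autoaudiosink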