RTP streaming in SIP



我对SIP和RTP都是新手。我已经成功地创建了一个SIP呼叫,但我仍然没有语音会话。

我明白我必须创建一个RTP流并发送数据包。但我无法决定从哪里开始。我找到了JMF库(jar),但我无法理解如何使用它们。我还想在通话期间向我呼叫的人播放音频。

我是否必须在SIP INVITE内启动RTP会话或我是否必须在呼叫被单独应答后创建RTP会话。我找不到问题的答案。

我也想知道我如何创建一个RTP会话。我正在做纯Java编程,我找到了一个JMF教程,但它需要安装。我想知道是否可以只用纯Java编程实现。我有jmf-2.1.1.jar文件,想知道如何使用它。

/**
 * Opens the RTP and RTCP datagram sockets (RTCP on port+1, per RTP
 * convention) and registers this object as the RTP application callback.
 *
 * @param isLocal   true when playing locally only (no audio line is opened
 *                  for local playback in run() in that case)
 * @param RTPsocket local UDP port for RTP; RTPsocket+1 is used for RTCP
 */
public SoundSenderDemo(boolean isLocal, int RTPsocket)  {
        DatagramSocket rtpSocket = null;
        DatagramSocket rtcpSocket = null;
        int socket = RTPsocket;

        try {
            rtpSocket = new DatagramSocket(socket);
            rtcpSocket = new DatagramSocket(socket + 1);
        } catch (Exception e) {
            // BUGFIX: the original swallowed the exception with a bare
            // message; keep the "continue anyway" behavior but report which
            // ports failed and preserve the stack trace. A null socket here
            // will otherwise surface later as an opaque NPE inside RTPSession.
            System.out.println("RTPSession failed to obtain port " + socket
                    + " or " + (socket + 1));
            e.printStackTrace();
        }
        rtpSession = new RTPSession(rtpSocket, rtcpSocket);
        rtpSession.RTPSessionRegister(this, null, null);
        // NOTE(review): this Participant is never added to the session
        // (addParticipant is commented out), and "sip:username@password" is
        // not a valid SIP URI (should be user@host) — confirm intent.
        Participant p = new Participant("sip:username@password", socket, (socket + 1));
//      rtpSession.addParticipant(p);
        System.out.println("CNAME: " + rtpSession.CNAME());
        System.out.println("RTPSession: " + rtpSession.toString());
        System.out.println("Participant: " + rtpSession.getParticipants());
        System.out.println("unicast Receivers: " + rtpSession.getUnicastReceivers());
        this.local = isLocal;
    }
/**
 * Streams the wave file {@code filename} over the RTP session in
 * EXTERNAL_BUFFER_SIZE chunks (at most 200 packets), optionally playing it
 * on the local audio line, and sends one RTCP APP packet to every known
 * participant around packet 100, then ends the session.
 */
public void run() {
        if (RTPSession.rtpDebugLevel > 1) {
            System.out.println("-> Run()");
        }
        File soundFile = new File(filename);
        if (!soundFile.exists()) {
            System.err.println("Wave file not found: " + filename);
            return;
        }
        AudioInputStream audioInputStream = null;
        try {
            audioInputStream = AudioSystem.getAudioInputStream(soundFile);
        } catch (UnsupportedAudioFileException e1) {
            e1.printStackTrace();
            return;
        } catch (IOException e1) {
            e1.printStackTrace();
            return;
        }
        // Fixed transmit format: 8 kHz, 16-bit, mono, signed PCM,
        // little-endian (the file's own format is deliberately ignored).
        AudioFormat.Encoding encoding = new AudioFormat.Encoding("PCM_SIGNED");
        AudioFormat format = new AudioFormat(encoding, 8000.0f, 16, 1, 2, 8000.0f, false);
        System.out.println(format.toString());

        if (!this.local) {
            // To time the output correctly, we also play at the input:
            auline = null;
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
            try {
                auline = (SourceDataLine) AudioSystem.getLine(info);
                auline.open(format);
            } catch (LineUnavailableException e) {
                e.printStackTrace();
                return;
            } catch (Exception e) {
                e.printStackTrace();
                return;
            }
            if (auline.isControlSupported(FloatControl.Type.PAN)) {
                FloatControl pan = (FloatControl) auline
                        .getControl(FloatControl.Type.PAN);
                if (this.curPosition == Position.RIGHT)
                    pan.setValue(1.0f);
                else if (this.curPosition == Position.LEFT)
                    pan.setValue(-1.0f);
            }
            auline.start();
        }
        int nBytesRead = 0;
        byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
        long start = System.currentTimeMillis();
        try {
            while (nBytesRead != -1 && pktCount < 200) {
                nBytesRead = audioInputStream.read(abData, 0, abData.length);
                if (nBytesRead >= 0) {
                    // BUGFIX: only send the bytes actually read — the final
                    // partial read used to ship stale bytes from the tail of
                    // the reused buffer.
                    byte[] payload = abData;
                    if (nBytesRead < abData.length) {
                        payload = new byte[nBytesRead];
                        System.arraycopy(abData, 0, payload, 0, nBytesRead);
                    }
                    rtpSession.sendData(payload);
                    // BUGFIX: auline is only opened when !local; the
                    // unconditional write was an NPE in the local case (the
                    // commented-out guard in the original shows the intent).
                    if (auline != null) {
                        auline.write(abData, 0, nBytesRead);
                    }
                    pktCount++;
                }
                if (pktCount == 100) {
                    // Demo: send one RTCP APP packet to each participant.
                    Enumeration<Participant> iter = this.rtpSession.getParticipants();
                    Participant p = null;
                    while (iter.hasMoreElements()) {
                        p = iter.nextElement();
                        byte[] nameBytes = "name".getBytes();
                        byte[] dataBytes = "abcd".getBytes();
                        int ret = rtpSession.sendRTCPAppPacket(p.getSSRC(), 0, nameBytes, dataBytes);
                        System.out.println("!!!!!!!!!!!! ADDED APPLICATION SPECIFIC " + ret);
                    }
                    if (p == null)
                        System.out.println("No participant with SSRC available :(");
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
            return;
        } finally {
            // BUGFIX: the audio stream was never closed (resource leak).
            try {
                audioInputStream.close();
            } catch (IOException ignored) {
                // best-effort close; nothing useful to do here
            }
        }
        System.out.println("Time: " + (System.currentTimeMillis() - start) / 1000 + " s");
        // Small grace periods so in-flight RTP/RTCP packets drain before and
        // after tearing the session down.
        try { Thread.sleep(200); } catch (Exception e) {}
        this.rtpSession.endSession();
        try { Thread.sleep(2000); } catch (Exception e) {}
        if (RTPSession.rtpDebugLevel > 1) {
            System.out.println("<- Run()");
        }
    }

发送ACK时

dialog.sendAck(ackRequest);
//                  System.out.println(ackRequest.toString());
                logger.debug(ackRequest.toString());
                aDemo = new SoundSenderDemo(false, RTPsocket);
                RTPstart();

public void RTPstart(){
        // Start RTP Session: stream a local wave file to the callee.
        String file = "C:/universAAL/workspaces/SIPfinaltest withRTP/SIPfinaltest/JSIP/garfield_converted.wav";
//      SoundSenderDemo aDemo = new SoundSenderDemo(false);
        // BUGFIX: `file` was declared but never used while the filename came
        // from args[0] (not visibly in scope here). Use the declared path so
        // the method is self-contained.
        aDemo.filename = file;
        aDemo.run();
        System.out.println("pktCount: " + aDemo.pktCount);
    }

我还在邀请中设置了:

// SDP offer carried in the INVITE body.
// BUGFIX: SDP lines must be terminated with CRLF ("\r\n", RFC 4566 §5);
// the escape sequences had degenerated to a literal 'n' ("v=0n"), so the
// whole body was one unparseable line ("v=0no=user1...").
// NOTE(review): the m= line advertises port 8000 — this must be the local
// RTP port actually opened (RTPsocket), not the 8000 Hz sample rate; confirm
// it matches the DatagramSocket bound in SoundSenderDemo.
String sdpData = "v=0\r\n" +
        "o=user1 795808818 480847547 IN IP4 " + localIP + "\r\n" +
        "s=-\r\n" +
        "c=IN IP4 " + localIP + "\r\n" +
        "t=0 0\r\n" +
        "m=audio 8000 RTP/AVP 0 8 101\r\n" +
        "a=rtpmap:0 PCMU/8000\r\n" +
        "a=rtpmap:8 PCMA/8000\r\n" +
        "a=rtpmap:101 telephone-event/8000\r\n" +
        "a=sendrecv\r\n";
byte[] contents = sdpData.getBytes();

这是响应:

SIP/2.0 200 OK
Via: SIP/2.0/UDP 10.99.134.149:5060;branch=z9hG4bK-333831-44ef6fc075d847c6420a0f95b2022345;received=10.99.134.149;rport=5060
From: <sip:top160_167@10.99.64.2>;tag=-1209613008
To: <sip:86940140@10.99.64.2>;tag=as12f64e9a
Call-ID: 5ac297147c47e8e20cc148dda4f350cf@10.99.134.149
CSeq: 5 INVITE
Server: Asterisk PBX 10.5.1
Allow: INVITE,ACK,CANCEL,OPTIONS,BYE,REFER,SUBSCRIBE,NOTIFY,INFO,PUBLISH
Supported: replaces,timer
Contact: <sip:86940140@10.99.64.2:5060>
Content-Type: application/sdp
Content-Length: 255
v=0
o=root 532626251 532626252 IN IP4 10.99.64.2
s=Asterisk PBX 10.5.1
c=IN IP4 10.99.64.2
t=0 0
m=audio 7758 RTP/AVP 8 101
a=rtpmap:8 PCMA/8000
a=rtpmap:101 telephone-event/8000
a=fmtp:101 0-16
a=silenceSupp:off - - - -
a=ptime:20
a=sendrecv

如果您对使用简单的Java编程建立sip/RTP会话感兴趣,请查看http://peers.sourceforge.net/。请看一下文档页。

JMF已经过时了。网上有很多java rtp栈,只要谷歌一下"java rtp栈"就可以了。

何时启动rtp会话?这取决于,有时你只能在收到200 OK响应时启动它,有时你必须在收到183响应时更早创建它。

你也可以在发送邀请时创建rtp会话,然后在收到邀请时用正确的信息(远程ip地址,端口,编解码器)更新它。

相关内容

  • 没有找到相关文章

最新更新