1. 程式人生 > >音視訊開發——流媒體資料傳輸RTP(三)

音視訊開發——流媒體資料傳輸RTP(三)

iOS音視訊開發相關文章:


實時傳輸協議(Real-time Transport Protocol,簡寫為RTP)是一個網路傳輸協議。

RTP協議是通過UDP層傳輸的,在本例中仍然使用CocoaAsyncSocket庫實現UDP協議。

1、註冊UDP監聽

#import "RTPReceiver.h"
#import "CocoaAsyncSocket/GCD/GCDAsyncUdpSocket.h"
#import "RTPPacket.h"

// Class extension: private ivars and protocol conformances for RTPReceiver.
// Receives RTP packets over UDP and forwards them to an RTPPacket parser.
@interface RTPReceiver() <GCDAsyncUdpSocketDelegate, RTPPacketDelegate> {
    
    int      _rtpPort;               // UDP port the receiver binds to
    dispatch_queue_t _rtpQueue;      // serial queue for socket delegate callbacks
    GCDAsyncUdpSocket   *_rtpSocket; // bound UDP socket (CocoaAsyncSocket)
    RTPPacket       *_rtpPacket;     // reassembles RTP payloads into full frames
}

@end

@implementation RTPReceiver

/// Designated initializer: binds a UDP socket to the given port and wires up
/// an RTPPacket parser whose frame callbacks come back to self.
/// @param port The local UDP port to bind for incoming RTP datagrams.
- (instancetype)initWithPort:(int)port {

    // BUG FIX: original used `if (self == [super init])` — a comparison, so
    // `self` was never assigned the initialized object.
    self = [super init];
    if (self) {
        _rtpPort = port;
        
        // Serial queue keeps socket delegate callbacks ordered.
        _rtpQueue = dispatch_queue_create("rtpSocketQueue", NULL);
        _rtpSocket = [[GCDAsyncUdpSocket alloc] initWithDelegate:self delegateQueue:_rtpQueue];
        
        NSError *error;
        // bindToPort:error: returns a BOOL; check the return value, not the
        // error pointer.
        BOOL didBind = [_rtpSocket bindToPort:_rtpPort error:&error];
        if (!didBind) {
            NSLog(@"ERROR!!! bind udp port: %@", error.localizedDescription);
        }
        
        _rtpPacket = [[RTPPacket alloc] init];
        _rtpPacket.delegate = self;
    }
    return self;
}

/// Starts asynchronous reception on the bound UDP socket.
- (void)startReceive {
    
    NSError *error;
    // Check the BOOL return value; the error pointer is only meaningful on
    // failure and may be non-nil garbage on success.
    if (![_rtpSocket beginReceiving:&error]) {
        NSLog(@"ERROR!!! receive RTP: %@", error.localizedDescription);
    }
}

#pragma mark GCDAsyncUdpSocket Delegate
/// Each received datagram is one RTP packet; hand it to the parser.
- (void)udpSocket:(GCDAsyncUdpSocket *)sock didReceiveData:(NSData *)data fromAddress:(NSData *)address withFilterContext:(id)filterContext {

    [_rtpPacket addNalu:data];
}

#pragma mark RTP Packet Delegate
/// Called when a complete frame has been reassembled; posts it on the main
/// queue via a notification.
/// @param frame Pointer to the frame bytes; owned by the caller and may be
///              freed as soon as this method returns.
/// @param size  Frame length in bytes.
/// @param sequ  RTP sequence number of the last packet of the frame.
- (void)DidPacketFrame:(uint8_t *)frame size:(int)size sequence:(int)sequ {

    // BUG FIX: copy the bytes *before* the async hop. The original built the
    // NSData inside the dispatched block, but the caller frees `frame`
    // immediately after this method returns — a use-after-free on the main
    // queue.
    NSData *frameData = [NSData dataWithBytes:frame length:size];
    dispatch_async(dispatch_get_main_queue(), ^{
        NSDictionary *dict = @{@"data": frameData,
                               @"size": @(size)};
        [[NSNotificationCenter defaultCenter] postNotificationName:@"client" object:dict];
    });
}

@end

2、接收到RTP資料後,解析其固定頭部,並將載荷重新封裝成一幀一幀的資料
/// Parses the fixed 12-byte RTP header of an incoming packet into the
/// `rtpHeaer` struct (declared elsewhere in this class), then hands the
/// packet to -loadNalu: for payload handling.
/// @param rtpData One complete RTP packet (header + payload).
- (void)addNalu:(NSData *)rtpData {

    bzero(&rtpHeaer, sizeof(rtpHeaer));
    
    uint8_t *dataByte = (uint8_t *)[rtpData bytes];
    
    // Byte 0: V(2) P(1) X(1) CC(4). Mask first, THEN shift.
    // BUG FIX: in C, `>>` binds tighter than `&`, so the original
    // `dataByte[0] & 0x20 >> 5` evaluated as `dataByte[0] & 1` and tested
    // bit 0 instead of the padding/extension bits.
    rtpHeaer.version = (dataByte[0] & 0xc0) >> 6;
    rtpHeaer.padding = ((dataByte[0] & 0x20) >> 5) == 1;
    rtpHeaer.extension = ((dataByte[0] & 0x10) >> 4) == 1;
    // Byte 1: M(1) PT(7) — low 7 bits are the payload type.
    rtpHeaer.payloadType = dataByte[1] & 0x7f;
    rtpHeaer.sequenceNumber = twoByte(dataByte + 2);
    rtpHeaer.timeStamp = fourByte(dataByte + 4);
    
    [self loadNalu:rtpData];
}

/// Dispatches one RTP payload by NAL unit type and accumulates slices until a
/// frame is complete.
/// NOTE(review): relies on `sliceArray`, `rtpHeaer`, `startCode`,
/// `RTPHeaderSize`, `packetAndSendIFrame` and `packetAndSendPFrame`, all
/// declared elsewhere in this class — not visible in this excerpt.
- (void)loadNalu:(NSData *)rtpData {

    // First two payload bytes: NAL header (or FU indicator + FU header).
    char NaluHeader[2];
    [rtpData getBytes:NaluHeader range:NSMakeRange(RTPHeaderSize, 2)];
    // Low 5 bits of the first payload byte: NAL unit type
    // (7 = SPS, 8 = PPS, 28 = FU-A fragment, 1 = non-IDR single NAL).
    int fuIndicator = NaluHeader[0] & 0x1f;
    switch (fuIndicator) {
        case 7:
            // SPS starts a new group: drop accumulated slices, then fall
            // through to store the SPS the same way as a PPS (deliberate
            // switch fall-through — no break).
            [sliceArray removeAllObjects];
        case 8:
        {
            // Store the SPS/PPS payload (RTP header stripped) as one slice.
            NSData *subData = [rtpData subdataWithRange:NSMakeRange(RTPHeaderSize, rtpData.length - RTPHeaderSize)];
            NalUnit *unit = [[NalUnit alloc] initWithData:subData size:rtpData.length - RTPHeaderSize sequence:rtpHeaer.sequenceNumber];
            [sliceArray addObject:unit];
        }
            break;
        case 28:
        {
            // FU-A fragment: second byte is the FU header — S(1) E(1) R(1)
            // plus the original NAL type in the low 5 bits.
            // NOTE(review): the reconstructed NAL header is never prepended
            // to the first fragment here — presumably packetAndSend* does
            // that; verify against those helpers.
            int frameType = NaluHeader[1] & 0x1f;
            if (frameType == 5) {
                // IDR (key frame) fragment: strip RTP header + 2 FU bytes.
                int frameLength = rtpData.length - RTPHeaderSize - 2;
                NSData *subData = [rtpData subdataWithRange:NSMakeRange(RTPHeaderSize + 2, frameLength)];
                NalUnit *unit = [[NalUnit alloc] initWithData:subData size:frameLength sequence:rtpHeaer.sequenceNumber];
                [sliceArray addObject:unit];
                
                // Top 3 bits of the FU header: S/E/R flags.
                int ser = (NaluHeader[1] & 0xe0) >> 5;
                if (ser == 2) {   // 010 = end-of-fragment flag
                    // Assemble the slices into a frame and call back.
                    [self packetAndSendIFrame];
                }
            } else if(frameType == 1){
                // Non-IDR (P-frame) fragment.
                int ser = (NaluHeader[1] & 0xe0) >> 5;
                if (ser == 4) {   // 100 = start-of-fragment flag
                    [sliceArray removeAllObjects];
                }
                
                int frameLength = rtpData.length - RTPHeaderSize - 2;
                NSData *subData = [rtpData subdataWithRange:NSMakeRange(RTPHeaderSize + 2, frameLength)];
                NalUnit *unit = [[NalUnit alloc] initWithData:subData size:frameLength sequence:rtpHeaer.sequenceNumber];
                [sliceArray addObject:unit];
                
                // NOTE(review): `ser == 2` can never be true here when
                // `ser == 4` was just tested, but a packet carrying only the
                // E bit (010) reaches this check — confirm the sender sets
                // S and E on separate packets.
                if (ser == 2) {
                    [self packetAndSendPFrame];
                }
            }
        }
            break;
        case 1:
            // Single non-fragmented NAL: prepend the 4-byte Annex-B start
            // code and deliver the frame immediately to the delegate.
            if (self.delegate && [self.delegate respondsToSelector:@selector(DidPacketFrame:size:sequence:)]) {
                
                int frameLength = rtpData.length - RTPHeaderSize + 4;
                uint8_t *buf = (uint8_t *)malloc(frameLength);
                memcpy(buf, startCode, 4);
                NSData *fData = [rtpData subdataWithRange:NSMakeRange(RTPHeaderSize, frameLength - 4)];
                memcpy(buf + 4, [fData bytes], frameLength - 4);
                // NOTE(review): buf is freed right after the delegate call —
                // the delegate must copy the bytes synchronously.
                [self.delegate DidPacketFrame:buf size:frameLength sequence:rtpHeaer.sequenceNumber];
                free(buf);
                buf = NULL;
            }
            break;
    }
    
    
}

通過上述步驟,組裝好的每幀資料就能送去解碼播放了。

歡迎大家加入iOS音視訊開發的QQ群:331753091