iOS Live Streaming App (Decoding)

Below we walk through hardware decoding with VideoToolbox, step by step through the code:
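The snippets below reference several instance variables and constants (packetBuffer, packetSize, pSPS/pPPS, dataPointer, leftLength, maxReadLength, startCode, and a few properties) that the original post does not show. Here is a minimal sketch of plausible declarations, reconstructed from how they are used; the names, types, and the AAPLEAGLLayer header path are assumptions:

    // Sketch (assumptions): declarations reconstructed from usage in the code below
    #import <VideoToolbox/VideoToolbox.h>
    #import "AAPLEAGLLayer.h"   // CVPixelBuffer display layer from Apple's sample code (assumed header name)

    // Annex B start code that delimits NALUs in the raw .h264 file
    static const uint8_t startCode[4] = {0x00, 0x00, 0x00, 0x01};

    @interface ViewController ()
    {
        uint8_t *packetBuffer;   // one NALU: 4-byte length prefix + payload
        long     packetSize;     // size of packetBuffer in bytes

        uint8_t *pSPS;           // SPS payload (start code stripped)
        size_t   mSPSSize;
        uint8_t *pPPS;           // PPS payload (start code stripped)
        size_t   mPPSSize;

        uint8_t *dataPointer;    // read buffer for the encoded stream
        long     leftLength;     // bytes currently held in dataPointer
        long     maxReadLength;  // capacity of dataPointer
    }
    @property (nonatomic, strong) NSInputStream *inputStream;
    @property (nonatomic, strong) CADisplayLink *displayLink;
    @property (nonatomic, strong) dispatch_queue_t queue;
    @property (nonatomic, strong) AAPLEAGLLayer *previewLayer;
    @property (nonatomic, assign) CMVideoFormatDescriptionRef format;
    @property (nonatomic, assign) VTDecompressionSessionRef session;
    @end

With those declarations assumed, viewDidLoad wires everything up: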

    - (void)viewDidLoad {
        [super viewDidLoad];

        //1. Create the NSInputStream that feeds the raw H.264 file
        NSString *filePath = [[NSBundle mainBundle] pathForResource:@"abc.h264" ofType:nil];
        self.inputStream = [NSInputStream inputStreamWithFileAtPath:filePath];

        //2. Create the timer
        CADisplayLink *displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(readFrame)];
        //2.1. Fire once every 2 screen refreshes, i.e. roughly 30 times per second on a 60 Hz display
        displayLink.frameInterval = 2;
        [displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
        [displayLink setPaused:YES];
        self.displayLink = displayLink;

        //3. Grab a global queue for reading and decoding the encoded data
        self.queue = dispatch_get_global_queue(0, 0);

        //4. Create the layer that displays the decoded frames
        AAPLEAGLLayer *layer = [[AAPLEAGLLayer alloc] initWithFrame:self.view.bounds];
        [self.view.layer addSublayer:layer];
        self.previewLayer = layer;
    }

    //Read encoded data and decode it
    - (void)readFrame{

        dispatch_sync(self.queue, ^{
            //1. Read one NALU
            [self readPacket];

            //2. If nothing was read (the buffer is NULL or empty), stop playback
            if (packetBuffer == NULL || packetSize == 0){
                [self.displayLink setPaused:YES];
                [self.displayLink invalidate];
                [self.inputStream close];
                //(Author's note: wouldn't a plain return be enough here, so we could keep reading data into memory later?)
                return;
            }

            //3. Handle the NALU according to its type:
            //SPS, PPS, I frame, or other frames
            //3.1. Overwrite the 4-byte start code with the NALU length, converted from host byte order to big-endian (AVCC format)
            uint32_t nalSize = (uint32_t)(packetSize - 4);
            uint32_t *pNalSize = (uint32_t *)packetBuffer;
            *pNalSize = CFSwapInt32HostToBig(nalSize);

            //3.2. The fifth byte of the NALU is the NAL header; keep only its low 5 bits (the NAL type)
            int nalType = packetBuffer[4] & 0x1F;

            CVImageBufferRef imageBuffer = NULL;

            switch (nalType) {
                case 0x07:
                    //Allocate memory and record the SPS (start code stripped)
                    mSPSSize = packetSize - 4;
                    pSPS = malloc(mSPSSize);
                    memcpy(pSPS, packetBuffer + 4, mSPSSize);
                    break;
                case 0x08:
                    //Allocate memory and record the PPS (start code stripped)
                    mPPSSize = packetSize - 4;
                    pPPS = malloc(mPPSSize);
                    memcpy(pPPS, packetBuffer + 4, mPPSSize);
                    break;
                case 0x05:
                    //I frame: initialize the VTDecompressionSession from the SPS and PPS
                    //(each session is tied to its own parameter sets), then decode
                    [self initDecompressionsession];
                    imageBuffer = [self decodeFrame];
                    break;

                    //Other frames: decode directly
                default:
                    imageBuffer = [self decodeFrame];
                    break;
            }

            //4. Display the decoded frame
            if (imageBuffer != NULL){

                dispatch_async(dispatch_get_main_queue(), ^{

                    self.previewLayer.pixelBuffer = imageBuffer;
                    CFRelease(imageBuffer);
                });
            }
        });
    }

    //Read one NALU
    - (void)readPacket{

        //1. Clear the previous packetBuffer
        if (packetSize != 0 || packetBuffer != NULL){
            packetSize = 0;
            free(packetBuffer);
            packetBuffer = NULL;
        }

        //2. Top up the read buffer (if it is not full yet and the input stream still has bytes)
        if (leftLength < maxReadLength && self.inputStream.hasBytesAvailable){
            leftLength += [self.inputStream read:dataPointer + leftLength maxLength:maxReadLength - leftLength];
        }

        //3. Extract one NALU from dataPointer into packetBuffer
        uint8_t *pStart = dataPointer + 4;
        uint8_t *pEnd = dataPointer + leftLength;

        //It only counts as a NALU if the buffer begins with the start code and carries a payload beyond it
        if (memcmp(startCode, dataPointer, 4) == 0 && leftLength > 4){
            //Walk forward until pStart reaches pEnd - 3, looking for the end of this NALU
            while (pStart != pEnd - 3){
                //The NALU ends where the next 0x00 00 00 01 start code begins
                if (memcmp(pStart, startCode, 4) == 0){

                    //Found the next start code: we have one complete NALU, and pStart points one byte past its end
                    packetSize = pStart - dataPointer;
                    packetBuffer = malloc(packetSize);
                    memcpy(packetBuffer, dataPointer, packetSize);

                    //Shift the remaining bytes in dataPointer to the front
                    memmove(dataPointer, dataPointer + packetSize, leftLength - packetSize);
                    leftLength -= packetSize;
                    break;
                }else{
                    pStart++;
                }
            }
        }
    }

    //Initialize the VTDecompressionSession from the SPS and PPS; every VTDecompressionSession is tied to its own parameter sets
    - (void)initDecompressionsession{

        //1. Create the CMVideoFormatDescriptionRef from the parameter sets
        const uint8_t *parameterSetPointers[2] = {pSPS, pPPS};
        const size_t parameterSetSizes[2] = {mSPSSize, mPPSSize};
        CMVideoFormatDescriptionCreateFromH264ParameterSets(NULL, 2, parameterSetPointers, parameterSetSizes, 4, &_format);

        //2. Ask for NV12 (bi-planar 4:2:0, full range) output pixel buffers
        NSDictionary *attr = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};

        //3. The callback invoked for each decoded frame
        VTDecompressionOutputCallbackRecord callbackRecord;
        callbackRecord.decompressionOutputCallback = decompressionCallBack;
        callbackRecord.decompressionOutputRefCon = NULL; //not used here; the pixel buffer comes back through sourceFrameRefCon

        //4. Create the VTDecompressionSession
        VTDecompressionSessionCreate(NULL, self.format, NULL, (__bridge CFDictionaryRef _Nullable)(attr), &callbackRecord, &_session);
    }
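Note that initDecompressionsession runs again on every I frame, so the previous session and format description leak. A minimal sketch of a teardown helper that could be called at the top of the method, assuming the property names used above (the helper name teardownDecompressionSession is made up for illustration):

    //Sketch (assumption): release any existing session/format before building new ones
    - (void)teardownDecompressionSession{
        if (self.session) {
            VTDecompressionSessionWaitForAsynchronousFrames(self.session);
            VTDecompressionSessionInvalidate(self.session);
            CFRelease(self.session);
            self.session = NULL;
        }
        if (self.format) {
            CFRelease(self.format);
            self.format = NULL;
        }
    }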

    //Decode one NALU with VTDecompressionSessionDecodeFrame
    - (CVPixelBufferRef)decodeFrame{

        //1. Wrap packetBuffer in a CMBlockBufferRef (no copy, no custom deallocator)
        CMBlockBufferRef blockBuffer = NULL;
        CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, packetBuffer, packetSize, kCFAllocatorNull, NULL, 0, packetSize, 0, &blockBuffer);

        //2. Create a CMSampleBufferRef containing this single sample
        CMSampleBufferRef sampleBuffer = NULL;
        const size_t sampleSizeArray[] = {packetSize};
        CMSampleBufferCreateReady(NULL, blockBuffer, self.format, 1, 0, NULL, 1, sampleSizeArray, &sampleBuffer);

        //3. Decode; the decoded pixel buffer is handed back through sourceFrameRefCon in the callback
        CVPixelBufferRef outputPixelBuffer = NULL;
        VTDecompressionSessionDecodeFrame(self.session, sampleBuffer, 0, &outputPixelBuffer, NULL);

        //4. Release temporary resources
        CFRelease(sampleBuffer);
        CFRelease(blockBuffer);

        return outputPixelBuffer;
    }
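The return values of these calls are ignored above. In practice it helps to check the OSStatus from VTDecompressionSessionDecodeFrame, because the session can become invalid (for example after the app is backgrounded). A sketch of step 3 with error handling, reusing the variables from decodeFrame (assumption, not part of the original post):

    //Sketch (assumption): check the decode status instead of ignoring it
    OSStatus status = VTDecompressionSessionDecodeFrame(self.session, sampleBuffer, 0, &outputPixelBuffer, NULL);
    if (status == kVTInvalidSessionErr) {
        //The session is no longer usable; rebuild it from the cached SPS/PPS
        [self initDecompressionsession];
    } else if (status != noErr) {
        NSLog(@"VTDecompressionSessionDecodeFrame failed: %d", (int)status);
    }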

    - (IBAction)play:(id)sender {

        //1. Initialize the read-buffer bookkeeping
        maxReadLength = 1280 * 720;
        leftLength = 0;
        dataPointer = malloc(maxReadLength);

        //2. Open the input stream
        [self.inputStream open];

        //3. Un-pause the display link to start reading and decoding
        [self.displayLink setPaused:NO];
    }

    //Callback invoked once per decoded frame
    void decompressionCallBack(
                               void * CM_NULLABLE decompressionOutputRefCon,
                               void * CM_NULLABLE sourceFrameRefCon,
                               OSStatus status,
                               VTDecodeInfoFlags infoFlags,
                               CM_NULLABLE CVImageBufferRef imageBuffer,
                               CMTime presentationTimeStamp,
                               CMTime presentationDuration ){

        //sourceFrameRefCon is the &outputPixelBuffer passed to VTDecompressionSessionDecodeFrame;
        //retain the decoded image buffer and hand it back through that pointer
        CVImageBufferRef *pointer = sourceFrameRefCon;
        *pointer = CVBufferRetain(imageBuffer);
    }

Decoding key points:

1. Create a CADisplayLink timer.
2. Create an NSInputStream and keep reading NALUs from it.
3. Handle each NALU according to its type:
  3.1. SPS or PPS: record it.
  3.2. I frame: initialize the VTDecompressionSession from the SPS and PPS, then decode.
  3.3. Any other frame: decode directly.
4. Display the decoded frames.
