Muxing H264, AAC and G711 into MP4

There are three common ways to do this:
1. iOS's AVAssetWriter
2. FFmpeg
3. MP4V2

This article uses the third one:

Muxing local h264 and aac files into MP4:

1. Read the data from the h264 and aac files
NSString *h264FilePath = [[NSBundle mainBundle] pathForResource:@"fileName" ofType:@"h264"];
NSData *h264Data = [NSData dataWithContentsOfFile:h264FilePath];
// Reading the aac data works the same way
2. Split the data

First consider whether the data you read keeps audio and video separate or interleaves them. For interleaved data, split the audio and video according to whatever framing rules your protocol defines; for separate streams, split each one according to its own bitstream format. The interleaved protocol used here is proprietary and cannot be shared, so the code below covers the separate case (sketches of a combined 3/4-byte start-code check and of parsing the ADTS header directly follow the splitting code).

// Video
// Split the video data into NALUs. A NALU is preceded by a 0x00 0x00 0x00 0x01
// or 0x00 0x00 0x01 start code; only the 4-byte form is handled here.
uint8_t *videoData = (uint8_t *)[h264Data bytes];
NSUInteger videoLength = h264Data.length;
NSUInteger j = 0;
NSUInteger lastJ = 0;
while (j < videoLength) {
    if (j + 3 < videoLength &&
        videoData[j] == 0x00 &&
        videoData[j + 1] == 0x00 &&
        videoData[j + 2] == 0x00 &&
        videoData[j + 3] == 0x01) {
        if (j > 0) {
            // Everything from the previous start code up to this one is one NALU
            NSUInteger frame_size = j - lastJ;
            NSData *buff = [NSData dataWithBytes:&videoData[lastJ] length:frame_size];
            lastJ = j;
            [mp4v2Tool addVideoData:buff];
        }
    } else if (j == videoLength - 1) {
        // Last NALU: it runs to the end of the buffer
        NSUInteger frame_size = videoLength - lastJ;
        NSData *buff = [NSData dataWithBytes:&videoData[lastJ] length:frame_size];
        [mp4v2Tool addVideoData:buff];
    }
    j++;
}
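The split above only recognizes the 4-byte start code, even though the comment notes that the 3-byte form also exists. A minimal sketch of a check that covers both forms (StartCodeLength is a hypothetical helper, not part of the article's code):

static int StartCodeLength(const unsigned char *p, unsigned int bytesLeft)
{
    // 4-byte form: 0x00 0x00 0x00 0x01
    if (bytesLeft >= 4 && p[0] == 0x00 && p[1] == 0x00 && p[2] == 0x00 && p[3] == 0x01)
        return 4;
    // 3-byte form: 0x00 0x00 0x01
    if (bytesLeft >= 3 && p[0] == 0x00 && p[1] == 0x00 && p[2] == 0x01)
        return 3;
    return 0;   // no start code at this position
}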
// Audio
// Split the AAC data into ADTS frames; each frame starts with the 12-bit sync word 0xFFF
uint8_t *voiceData = (uint8_t *)[aacData bytes];
NSUInteger voiceLength = aacData.length;
j = 0;
lastJ = 0;
while (j < voiceLength) {
    if (j + 1 < voiceLength &&
        voiceData[j] == 0xff &&
        (voiceData[j + 1] & 0xf0) == 0xf0) {
        if (j > 0) {
            NSUInteger frame_size = j - lastJ;
            // Ignore anything shorter than an ADTS header (7 bytes)
            if (frame_size > 7) {
                NSData *buff = [NSData dataWithBytes:&voiceData[lastJ] length:frame_size];
                lastJ = j;
                [mp4v2Tool addAudioData:buff];
            }
        }
    } else if (j == voiceLength - 1) {
        // Last frame: it runs to the end of the buffer
        NSUInteger frame_size = voiceLength - lastJ;
        if (frame_size > 7) {
            NSData *buff = [NSData dataWithBytes:&voiceData[lastJ] length:frame_size];
            [mp4v2Tool addAudioData:buff];
        }
    }
    j++;
}
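The audio scan above relies on finding the next 0xFFF sync word plus the frame_size > 7 heuristic. The ADTS header itself carries the exact frame length (header plus payload) in a 13-bit field, so a splitter could also advance frame by frame. A minimal sketch, assuming 7-byte ADTS headers without CRC; AdtsFrameLength is a hypothetical helper:

static int AdtsFrameLength(const unsigned char *adts, unsigned int bytesLeft)
{
    if (bytesLeft < 7)
        return 0;                                   // not even a complete header left
    if (adts[0] != 0xFF || (adts[1] & 0xF0) != 0xF0)
        return 0;                                   // not positioned on an ADTS sync word
    // aac_frame_length: low 2 bits of byte 3, all of byte 4, high 3 bits of byte 5
    int frameLen = ((adts[3] & 0x03) << 11) | (adts[4] << 3) | ((adts[5] & 0xE0) >> 5);
    return (unsigned int)frameLen <= bytesLeft ? frameLen : 0;
}

Advancing by this value per frame also avoids false sync-word hits inside the payload.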
3. Create the MP4 file and set the related parameters
/*
 Purpose: create the MP4 file handle.
 Returns: the MP4 file handle.
 Parameters: fileName - the MP4 file to record into; flags - the creation flags. For an ordinary file the default value 0 is fine; to record an MP4 file larger than 4 GB, pass MP4_CREATE_64BIT_DATA here.
 */
m_mp4FHandle = MP4Create(strFilePath);
// m_vTimeScale is the video timescale, typically 90000 (90 kHz)
MP4SetTimeScale(m_mp4FHandle, m_vTimeScale);

// For G.711 (a-law format)
/*
 Add the audio track as a-law PCM.
 By default one sample corresponds to a 20 ms packet.
 */
m_aTrackId = MP4AddALawAudioTrack(m_mp4FHandle, audioSampleRate);   // audioSampleRate is normally 8000 Hz for G.711
MP4SetTrackIntegerProperty(m_mp4FHandle, m_aTrackId, "mdia.minf.stbl.stsd.alaw.channels", 1);
MP4SetTrackIntegerProperty(m_mp4FHandle, m_aTrackId, "mdia.minf.stbl.stsd.alaw.sampleSize", 8);
4. Write the video data
Parse the required fields out of each NALU produced by the splitting step:
typedef struct _MP4ENC_NaluUnit
{
    int frameType;              // NALU type (low 5 bits of the first payload byte)
    int frameLen;               // NALU length, not including the 00 00 00 01 start code
    unsigned char *pframeBuf;   // payload pointer, not including the start code
} MP4ENC_NaluUnit;

// Scan the buffer from offSet, extract one NALU delimited by 00 00 00 01 start codes,
// fill in nalu and return the number of bytes consumed (0 if no complete NALU was found).
static int ReadOneNaluFromBuf(const unsigned char *buffer,
                              unsigned int nBufferSize,
                              unsigned int offSet,
                              MP4ENC_NaluUnit &nalu)
{
    unsigned int i = offSet;
    while (i + 3 < nBufferSize)   // stay in bounds while matching the 4-byte start code
    {
        if (buffer[i++] == 0x00 && buffer[i] == 0x00 && buffer[i+1] == 0x00 && buffer[i+2] == 0x01)
        {
            unsigned int iEnd = i + 3;   // first byte of the NALU payload
            if (iEnd >= nBufferSize)
                return 0;                // a trailing start code with no payload
            unsigned int pos = iEnd;
            unsigned int posEnd = 0;
            while (pos + 3 < nBufferSize)   // look for the next start code
            {
                if (buffer[pos++] == 0x00 && buffer[pos] == 0x00 && buffer[pos+1] == 0x00 && buffer[pos+2] == 0x01)
                {
                    posEnd = pos + 3;
                    break;
                }
            }
            if (posEnd == 0)
            {
                // No further start code: this NALU runs to the end of the buffer
                nalu.frameLen = nBufferSize - iEnd;
            }
            else
            {
                // The NALU ends right before the next start code
                nalu.frameLen = (posEnd - 4) - iEnd;
            }

            nalu.frameType = buffer[iEnd] & 0x1f;
            nalu.pframeBuf = (unsigned char *)&buffer[iEnd];
            return (nalu.frameLen + iEnd - offSet);
        }
    }

    return 0;
}
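ReadOneNaluFromBuf returns the number of bytes it consumed (leading bytes plus start code plus payload), so a caller simply keeps advancing the offset by the return value. A minimal driver sketch; ForEachNalu and handleNalu are hypothetical names standing in for the SPS/PPS/sample handling shown next:

static void ForEachNalu(const unsigned char *buffer, unsigned int bufferSize,
                        void (*handleNalu)(const MP4ENC_NaluUnit &nalu))
{
    MP4ENC_NaluUnit nalu;
    unsigned int offset = 0;
    int consumed = 0;
    while ((consumed = ReadOneNaluFromBuf(buffer, bufferSize, offset, nalu)) > 0)
    {
        handleNalu(nalu);   // dispatch on nalu.frameType: 0x07 SPS, 0x08 PPS, otherwise a sample
        offset += consumed;
    }
}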

// Grab the SPS
if (nalu.frameType == 0x07) // sps
{
    if (m_bGetSpsSlice == false)
    {
        // Profile, compatibility flags and level come straight from bytes 1-3 of the SPS;
        // the last argument (3) means samples will use a 4-byte length prefix
        m_vTrackId = MP4AddH264VideoTrack(m_mp4FHandle,
                                          m_vTimeScale,
                                          m_vTimeScale / m_vFrateR,
                                          m_vWidth,
                                          m_vHeight,
                                          nalu.pframeBuf[1],   // AVCProfileIndication
                                          nalu.pframeBuf[2],   // profile_compat
                                          nalu.pframeBuf[3],   // AVCLevelIndication
                                          3);                  // sampleLenFieldSizeMinusOne
        if (m_vTrackId == MP4_INVALID_TRACK_ID)
        {
            printf("add video track failed.\n");
            return -1;
        }

        MP4SetVideoProfileLevel(m_mp4FHandle, 1);
        MP4AddH264SequenceParameterSet(m_mp4FHandle, m_vTrackId, nalu.pframeBuf, nalu.frameLen);

        m_bGetSpsSlice = true;
    }
}
// Grab the PPS
if (nalu.frameType == 0x08) // pps
{
    if (m_bGetPpsSlice == false)
    {
        MP4AddH264PictureParameterSet(m_mp4FHandle, m_vTrackId, nalu.pframeBuf, nalu.frameLen);
        m_bGetPpsSlice = true;
    }
}
// Write the sample data
if ((nalu.frameType != 0x06) && (nalu.frameType != 0x0d))   // do not write SEI (0x06) or 0x0d NALUs as samples
{
    if ((m_vTrackId != MP4_INVALID_TRACK_ID) && m_bGetSpsSlice && m_bGetPpsSlice && m_bRecord)
    {
        // Start writing only once the track id is valid and the SPS/PPS have been seen.
        // MP4 stores H.264 in AVCC form: each NALU gets a 4-byte big-endian length prefix
        // instead of the Annex-B start code.
        int datalen = nalu.frameLen + 4;
        BYTE *data = new BYTE[datalen];

        data[0] = nalu.frameLen >> 24;
        data[1] = nalu.frameLen >> 16;
        data[2] = nalu.frameLen >> 8;
        data[3] = nalu.frameLen & 0xff;

        memcpy(data + 4, nalu.pframeBuf, nalu.frameLen);

        if (!MP4WriteSample(m_mp4FHandle, m_vTrackId, (const uint8_t *)data, datalen, m_vTimeScale / m_vFrateR))
        {
            printf("write a video sample failed\n");
            delete [] data;

            return -1;
        }
        m_bGetIFrame = true;

        delete [] data;
    }
}
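The MP4WriteSample call above leaves its last two parameters at their defaults, so every sample is flagged as a sync sample. If accurate seeking matters, only IDR NALUs (type 0x05) should carry that flag. A sketch under that assumption; WriteAvccSample is a hypothetical helper, not part of the article's class:

#include <cstring>
#include <vector>
#include <mp4v2/mp4v2.h>

static bool WriteAvccSample(MP4FileHandle hFile, MP4TrackId track,
                            const unsigned char *nalu, uint32_t naluLen,
                            MP4Duration duration)
{
    // 4-byte big-endian length prefix; the size must match the
    // sampleLenFieldSizeMinusOne = 3 passed to MP4AddH264VideoTrack
    std::vector<uint8_t> sample(naluLen + 4);
    sample[0] = (uint8_t)(naluLen >> 24);
    sample[1] = (uint8_t)(naluLen >> 16);
    sample[2] = (uint8_t)(naluLen >> 8);
    sample[3] = (uint8_t)(naluLen & 0xFF);
    memcpy(&sample[4], nalu, naluLen);

    bool isIdr = (nalu[0] & 0x1F) == 0x05;   // only IDR frames are marked as sync samples
    return MP4WriteSample(hFile, track, sample.data(), (uint32_t)sample.size(),
                          duration, 0, isIdr);
}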
5. Write the audio data
int CMp4Encoder::WriteAudioTrack(BYTE* _aacData, int _aacSize)
{
    if (m_aTrackId == MP4_INVALID_TRACK_ID)
    {
        return -1;
    }

    // Do not write audio until the first video frame has been written,
    // so the two tracks start roughly aligned
    if (!m_bGetIFrame)
    {
        return -1;
    }

    if (!m_bRecord)
    {
        return -1;
    }

    if (m_audioFormat == WAVE_FORMAT_AAC)
    {
        // Strip the 7-byte ADTS header and write the raw AAC frame;
        // each frame lasts 1024 units on the track timescale
        bool result = MP4WriteSample(m_mp4FHandle, m_aTrackId, (const uint8_t*)_aacData + 7, _aacSize - 7, 1024, 0, 1);
        if (result == true) {
            printf("add success!\n");
        } else {
            printf("add failed!\n");
        }
    }
    else if (m_audioFormat == WAVE_FORMAT_G711)
    {
        // Raw G.711 payload; MP4_INVALID_DURATION falls back to the track's default sample duration
        MP4WriteSample(m_mp4FHandle, m_aTrackId, (const uint8_t*)_aacData, _aacSize, MP4_INVALID_DURATION, 0, 1);
    }

    return _aacSize;
}
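The _aacData+7 / _aacSize-7 above assumes an ADTS header without CRC. When protection_absent is 0 the header is 9 bytes (7 plus a 2-byte CRC), so a more defensive version could read the header size from the stream itself, a small sketch:

static int AdtsHeaderSize(const unsigned char *adts)
{
    // Bit 0 of byte 1 is protection_absent: 1 -> 7-byte header, 0 -> 7 bytes + 2 CRC bytes
    return (adts[1] & 0x01) ? 7 : 9;
}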
6. Close the file
MP4Close(m_mp4FHandle);
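Optionally, the finished file can be post-processed with MP4Optimize, which rewrites it so the moov box sits at the front and the samples are interleaved, which is friendlier for progressive playback. A sketch (the paths are placeholders, and the exact MP4Optimize parameters can differ slightly between mp4v2 versions):

#include <mp4v2/mp4v2.h>

// Close the recording and write an optimized copy next to it
void FinalizeRecording(MP4FileHandle handle, const char *recordedPath, const char *optimizedPath)
{
    MP4Close(handle);
    MP4Optimize(recordedPath, optimizedPath);   // pass NULL as the second argument to rewrite in place
}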

Building the MP4V2 static library (.a) for iOS

MP4V2-iOS

Download it and run the build script.

Background knowledge:

Commonly seen values of the NAL (Network Abstraction Layer) header byte (the helpers below show how they map to NAL unit types):
0x67: SPS
0x68: PPS
0x65: IDR slice
0x61: non-IDR slice
0x01: B slice
0x06: SEI
0x09: AU delimiter
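Those byte values follow directly from the NAL header layout: the low 5 bits are nal_unit_type and the next 2 bits are nal_ref_idc, so for example 0x67 & 0x1F == 7 (SPS) and 0x65 & 0x1F == 5 (IDR). Two small helpers to make that explicit:

static inline int NalUnitType(unsigned char header) { return header & 0x1F; }
static inline int NalRefIdc(unsigned char header)   { return (header >> 5) & 0x03; }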

Notes:
You must obtain the SPS and PPS first. The sequence parameter set (SPS) applies to a series of consecutive coded pictures, while the picture parameter set (PPS) applies to one or more individual pictures in the coded video sequence. If the decoder does not receive these two parameter sets correctly, the remaining NALUs cannot be decoded either. They are therefore usually sent before the other NALUs, often over a separate channel or a more reliable transport (such as TCP), and may also be retransmitted repeatedly.
The video frame rate and the audio sample rate must be set correctly; otherwise the playback speed will be wrong or audio and video will drift out of sync (see the worked duration example below).
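A small worked example of the durations implied by the settings above, assuming 25 fps video on a 90000 timescale and 44.1 kHz AAC (the actual numbers must come from your encoder):

const unsigned int videoTimeScale      = 90000;                        // video/movie timescale
const unsigned int frameRate           = 25;                           // assumed frame rate
const unsigned int videoSampleDuration = videoTimeScale / frameRate;   // 3600 ticks per video frame
const unsigned int audioTimeScale      = 44100;                        // AAC track timescale == sample rate
const unsigned int aacSampleDuration   = 1024;                         // one AAC frame = 1024 PCM samples, about 23 ms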
