Android stagefright encoder

Once the camera starts recording, a frame callback carrying each frame's timestamp is invoked; the call chain eventually lands in
frameworks/base/media/libstagefright/CameraSource.cpp:

void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
       LOGE("--------- CameraSource datacallbacktimestamp ---------");
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) { // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
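
The frames queued onto mFramesReceived here are consumed on the other side by CameraSource::read(), which blocks on mFrameAvailableCondition until the callback above signals it. The following is only a simplified sketch of that consumer side, reusing the member names above; the real read() additionally tracks frames being encoded and releases camera frames back to the driver:

// Simplified sketch, not the verbatim AOSP implementation: shows how read()
// pairs with dataCallbackTimestamp() through mLock / mFrameAvailableCondition.
status_t CameraSource::read(MediaBuffer **buffer, const ReadOptions * /* options */) {
    *buffer = NULL;
    sp<IMemory> frame;
    int64_t frameTimeUs;
    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            mFrameAvailableCondition.wait(mLock); // signalled by dataCallbackTimestamp()
        }
        if (!mStarted) {
            return ERROR_END_OF_STREAM;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());
        frameTimeUs = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
    }
    // Hand the camera frame to the encoder as a MediaBuffer stamped with its time.
    *buffer = new MediaBuffer(frame->pointer(), frame->size());
    (*buffer)->meta_data()->setInt64(kKeyTime, frameTimeUs);
    return OK;
}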

On the application side, pressing the record button creates a recorder session in the MediaRecorderService; the Binder calls land in

frameworks/base/media/libmediaplayerservice/MediaRecorderClient.cpp
status_t MediaRecorderClient::setCamera(const sp<ICamera>& camera)
{
    LOGE("setCamera");
    Mutex::Autolock lock(mLock);
    if (mRecorder == NULL) {
        LOGE("recorder is not initialized");
        return NO_INIT;
    }
    return mRecorder->setCamera(camera);
}
status_t MediaRecorderClient::init()
{
    LOGE("init");
    Mutex::Autolock lock(mLock);
    if (mRecorder == NULL) {
        LOGE("recorder is not initialized");
        return NO_INIT;
    }
    return mRecorder->init();
}

status_t MediaRecorderClient::setListener(const sp<IMediaRecorderClient>& listener)
{
    LOGE("setListener");
    Mutex::Autolock lock(mLock);
    if (mRecorder == NULL) {
        LOGE("recorder is not initialized");
        return NO_INIT;
    }
    return mRecorder->setListener(listener);
}

status_t MediaRecorderClient::setAudioSource(int as)
{
    LOGE("setAudioSource(%d)", as);
    if (!checkPermission(recordAudioPermission)) {
        return PERMISSION_DENIED;
    }
    Mutex::Autolock lock(mLock);
    if (mRecorder == NULL) {
        LOGE("recorder is not initialized");
        return NO_INIT;
    }
    return mRecorder->setAudioSource((audio_source)as);
}

status_t MediaRecorderClient::setVideoSource(int vs)
{
    LOGE("setVideoSource(%d)", vs);
    if (!checkPermission(cameraPermission)) {
        return PERMISSION_DENIED;
    }
    Mutex::Autolock lock(mLock);
    if (mRecorder == NULL)    {
        LOGE("recorder is not initialized");
        return NO_INIT;
    }
    return mRecorder->setVideoSource((video_source)vs);
}
…and so on for the remaining setter functions.
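
All of these setters are reached over Binder from the application side. As a rough sketch of the native client sequence that ends up in MediaRecorderClient (names as found in media/mediarecorder.h of this Android generation; treat the exact enum values and audio wiring as assumptions, not a verified recipe):

#include <media/mediarecorder.h>

using namespace android;

// Hedged sketch: client-side calls that arrive at the MediaRecorderClient
// methods quoted above via Binder.
static status_t startCamcorder(int outputFd) {
    sp<MediaRecorder> recorder = new MediaRecorder();

    recorder->setVideoSource(VIDEO_SOURCE_CAMERA);     // -> MediaRecorderClient::setVideoSource()
    recorder->setAudioSource(AUDIO_SOURCE_CAMCORDER);  // -> MediaRecorderClient::setAudioSource()
    recorder->setOutputFormat(OUTPUT_FORMAT_MPEG_4);
    recorder->setVideoEncoder(VIDEO_ENCODER_H264);     // selects the AVC encoder path below
    recorder->setAudioEncoder(AUDIO_ENCODER_AAC);
    recorder->setOutputFile(outputFd, 0 /* offset */, 0 /* length */);

    status_t err = recorder->prepare();
    if (err != OK) {
        return err;
    }
    return recorder->start();  // StagefrightRecorder builds the encoder graph on this path
}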

MediaRecorderClient.cpp in turn calls directly into:
frameworks/base/media/libmediaplayerservice/StagefrightRecorder.cpp
status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
    source->clear();
    LOGE(" ----------- setupVideoEncoder -------------");

    LOGE(" -----------1 mVideoEncoderLevel = %d -------------",mVideoEncoderLevel);
    status_t err = setupCameraSource();
    if (err != OK) return err;

    sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
    CHECK(cameraSource != NULL);

    sp<MetaData> enc_meta = new MetaData;
    enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
    enc_meta->setInt32(kKeySampleRate, mFrameRate);

    switch (mVideoEncoder) {
        case VIDEO_ENCODER_H263:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
            break;

        case VIDEO_ENCODER_MPEG_4_SP:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
            break;

        case VIDEO_ENCODER_H264:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
            break;

        default:
            CHECK(!"Should not be here, unsupported video encoding.");
            break;
    }

    sp<MetaData> meta = cameraSource->getFormat();

    int32_t width, height, stride, sliceHeight, colorFormat;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));
    CHECK(meta->findInt32(kKeyStride, &stride));
    CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
    CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));

    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);
    enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
    enc_meta->setInt32(kKeyStride, stride);
    enc_meta->setInt32(kKeySliceHeight, sliceHeight);
    enc_meta->setInt32(kKeyColorFormat, colorFormat);
    if (mVideoTimeScale > 0) {
        enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
    }
    if (mVideoEncoderProfile != -1) {
        enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
    }
    if (mVideoEncoderLevel != -1) {
        enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
        LOGE(" ----------- mVideoEncoderLevel = %d -------------",mVideoEncoderLevel);
    }
    LOGE(" ----------- mVideoEncoderLevel = %d -------------",mVideoEncoderLevel);

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(), enc_meta,
            true /* createEncoder */, cameraSource);
    if (encoder == NULL) {
        return UNKNOWN_ERROR;
    }

    *source = encoder;

    return OK;
}
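
The encoder returned through *source is not read by StagefrightRecorder itself; it is handed to the file writer, which pulls encoded buffers from it. For the MPEG-4 case the wiring looks roughly like this (a sketch of the surrounding StagefrightRecorder code; the real MPEG-4 setup path also adds the audio track and size/duration limits):

// Sketch inside StagefrightRecorder (assumption: MPEG-4 output path).
sp<MediaSource> videoEncoder;
status_t err = setupVideoEncoder(&videoEncoder);
if (err != OK) {
    return err;
}

sp<MPEG4Writer> writer = new MPEG4Writer(mOutputFd);
writer->addSource(videoEncoder);  // the writer thread will call encoder->start()/read()
return writer->start();
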
The codec itself is created through OMXCodec::Create in
frameworks/base/media/libstagefright/OMXCodec.cpp
which decides which encoder or decoder component to use, based on these tables:
static const CodecInfo kDecoderInfo[] = {
    { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },

    { MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" },

// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },

    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" },

    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" },
// { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" },

    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
    { MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" },

    { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" },
    { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Intel.Mrst.PSB" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" },

    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Intel.Mrst.PSB" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" },

    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Intel.Mrst.PSB" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" },

    { MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" },
    { MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" },
};

static const CodecInfo kEncoderInfo[] = {
    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.encode" },
    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBEncoder" },
    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.encode" },
    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" },
    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
    { MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },

    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" },
// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },

    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" },
    { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" },
// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },

    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" },
    { MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" }, //这里使用这个
// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },

};

// static

sp<MediaSource> OMXCodec::Create(
        const sp<IOMX> &omx,
        const sp<MetaData> &meta, bool createEncoder,
        const sp<MediaSource> &source,
        const char *matchComponentName,
        uint32_t flags) {
    const char *mime;
    bool success = meta->findCString(kKeyMIMEType, &mime);
    CHECK(success);
    LOGE("????????????????? OMX Create");

    Vector<String8> matchingCodecs;
    findMatchingCodecs(
            mime, createEncoder, matchComponentName, flags, &matchingCodecs);

    if (matchingCodecs.isEmpty()) {
        return NULL;
    }

    sp<OMXCodecObserver> observer = new OMXCodecObserver;
    IOMX::node_id node = 0;

    const char *componentName;
    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        componentName = matchingCodecs[i].string();

        sp<MediaSource> softwareCodec = createEncoder ?
            InstantiateSoftwareEncoder(componentName, source, meta) :
            InstantiateSoftwareCodec(componentName, source);

        if (softwareCodec != NULL) {
            LOGE("Successfully allocated software codec '%s'", componentName);

            return softwareCodec;
        }

        LOGE("Attempting to allocate OMX node '%s'", componentName);

        uint32_t quirks = getComponentQuirks(componentName, createEncoder);

        if (!createEncoder
                && (quirks & kOutputBuffersAreUnreadable)
                && (flags & kClientNeedsFramebuffer)) {
            if (strncmp(componentName, "OMX.SEC.", 8)) {
                // For OMX.SEC.* decoders we can enable a special mode that

                // gives the client access to the framebuffer contents.


                LOGW("Component '%s' does not give the client access to "
                     "the framebuffer contents. Skipping.",
                     componentName);

                continue;
            }
        }

        status_t err = omx->allocateNode(componentName, observer, &node);
        if (err == OK) {
            LOGE("Successfully allocated OMX node '%s'", componentName);

            sp<OMXCodec> codec = new OMXCodec(
                    omx, node, quirks,
                    createEncoder, mime, componentName,
                    source);

            observer->setCodec(codec);

            err = codec->configureCodec(meta, flags);

            if (err == OK) {
                return codec;
            }

            LOGE("Failed to configure codec '%s'", componentName);
        }
    }

    return NULL;
}
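
Note that setupVideoEncoder() above passes no matchComponentName, so the first entry in kEncoderInfo that can actually be instantiated wins; on this platform that is the software "AVCEncoder" (as marked in the table), presumably because the vendor OMX nodes are not present. For experiments, a specific component can be pinned through the extra parameters of Create. A hedged example, reusing enc_meta and cameraSource from the recorder code above:

// Hedged example: force the software AVC encoder by component name.
OMXClient client;
CHECK_EQ(client.connect(), OK);

sp<MediaSource> encoder = OMXCodec::Create(
        client.interface(), enc_meta,
        true /* createEncoder */, cameraSource,
        "AVCEncoder" /* matchComponentName */,
        0 /* flags */);
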
The software encoder's implementation lives in
frameworks/base/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
AVCEncoder::AVCEncoder(
        const sp<MediaSource>& source,
        const sp<MetaData>& meta)
    : mSource(source),
      mMeta(meta),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mInputBuffer(NULL),
      mInputFrameData(NULL),
      mGroup(NULL) {

    LOGE("~~~~~ Construct software AVCEncoder");

    mHandle = new tagAVCHandle;
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    mInitCheck = initCheck(meta);
}

AVCEncoder::~AVCEncoder() {
    LOGE("Destruct software AVCEncoder");
    if (mStarted) {
        stop();
    }

    delete mEncParams;
    delete mHandle;
}

status_t AVCEncoder::initCheck(const sp<MetaData>& meta) {
    LOGE("~~~~~~ initCheck");
    CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
    CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
    CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
    CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));

    // XXX: Add more color format support

    CHECK(meta->findInt32(kKeyColorFormat, &mVideoColorFormat));
    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
        if (mVideoColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) {
            LOGE("Color format %d is not supported", mVideoColorFormat);
            return BAD_VALUE;
        }
        // Allocate spare buffer only when color conversion is needed.
        // Assume the color format is OMX_COLOR_FormatYUV420SemiPlanar.
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
        CHECK(mInputFrameData);
    }

    // XXX: Remove this restriction

    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
        LOGE("Video frame size %dx%d must be a multiple of 16",
            mVideoWidth, mVideoHeight);
        return BAD_VALUE;
    }
    // A long list of encoder parameters is set up here:
    mEncParams = new tagAVCEncParam;
    memset(mEncParams, 0, sizeof(mEncParams));
    mEncParams->width = mVideoWidth;
    mEncParams->height = mVideoHeight;
    mEncParams->frame_rate = 1000 * mVideoFrameRate; // In frames/ms!

    mEncParams->rate_control = AVC_ON;
    mEncParams->bitrate = mVideoBitRate;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;
    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;
    mEncParams->out_of_band_param_set = AVC_ON;
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;
    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
    uint32_t *sliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        sliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = sliceGroup;

    mEncParams->use_overrun_buffer = AVC_OFF;

    // Set IDR frame refresh interval

    int32_t iFramesIntervalSec;
    CHECK(meta->findInt32(kKeyIFramesInterval, &iFramesIntervalSec));
    if (iFramesIntervalSec < 0) {
        mEncParams->idr_period = -1;
    } else if (iFramesIntervalSec == 0) {
        mEncParams->idr_period = 1; // All I frames
    } else {
        mEncParams->idr_period =
            (iFramesIntervalSec * mVideoFrameRate);
    }
    LOGE("idr_period: %d, I-frames interval: %d seconds, and frame rate: %d",
        mEncParams->idr_period, iFramesIntervalSec, mVideoFrameRate);

    // Set profile and level
    // If profile and level setting is not correct, failure
    // is reported when the encoder is initialized.
    mEncParams->profile = AVC_BASELINE;
    mEncParams->level = AVC_LEVEL3_2;
    int32_t profile, level;
    if (meta->findInt32(kKeyVideoProfile, &profile)) {
        mEncParams->profile = (AVCProfile) profile;
    }
    if (meta->findInt32(kKeyVideoLevel, &level)) {
        mEncParams->level = (AVCLevel) level;
        LOGE("~~~~~~~~~ 2 mEncParams->level = %d",mEncParams->level);
    }

        LOGE("~~~~~~~~~ mEncParams->level = %d",mEncParams->level);

    mFormat = new MetaData;
    mFormat->setInt32(kKeyWidth, mVideoWidth);
    mFormat->setInt32(kKeyHeight, mVideoHeight);
    mFormat->setInt32(kKeyBitRate, mVideoBitRate);
    mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
    mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
    mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
    mFormat->setCString(kKeyDecoderComponent, "AVCEncoder");
    return OK;
}
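
To make the derived values concrete, here is a worked example assuming 640x480 at 30 fps with a 1-second I-frame interval (illustrative numbers only, not taken from this device):

// Illustrative computation matching the formulas in initCheck() above.
int32_t width = 640, height = 480;
int32_t frameRate = 30, iFramesIntervalSec = 1;

// One macroblock covers 16x16 pixels; round both dimensions up to a
// multiple of 16, multiply, then divide by 256.
int32_t nMacroBlocks = ((((width + 15) >> 4) << 4) *
                        (((height + 15) >> 4) << 4)) >> 8;  // (640 * 480) / 256 = 1200

// The IDR period is expressed in frames: interval in seconds times frame rate.
int32_t idrPeriod = iFramesIntervalSec * frameRate;         // 1 * 30 = 30 frames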

status_t AVCEncoder::start(MetaData *params) {
    LOGE("~~~~~~~~~~ start");
    if (mInitCheck != OK) {
        return mInitCheck;
    }

    if (mStarted) {
        LOGW("Call start() when encoder already started");
        return OK;
    }

    AVCEnc_Status err;
    LOGE("~~~~~~~~~~ PVAVCEncInitialize");
    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL); // this call also initializes part of the encoder parameters
    if (err != AVCENC_SUCCESS) {
        LOGE("Failed to initialize the encoder: %d", err);
        return UNKNOWN_ERROR;
    }

    mGroup = new MediaBufferGroup();
    int32_t maxSize;
    if (AVCENC_SUCCESS !=
        PVAVCEncGetMaxOutputBufferSize(mHandle, &maxSize)) {
        maxSize = 31584; // Magic #
    }
    mGroup->add_buffer(new MediaBuffer(maxSize));

    mSource->start(params);
    mNumInputFrames = -2; // 1st two buffers contain SPS and PPS

    mStarted = true;
    mSpsPpsHeaderReceived = false;
    mReadyForNextFrame = true;
    mIsIDRFrame = 0;

    return OK;
}
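
Once start() has run, the rest of the pipeline simply pulls encoded buffers out of the encoder. As the comment above notes, the first two buffers returned by read() carry the SPS and PPS; everything after that is frame data. A minimal, hedged consumer loop (not the actual MPEG4Writer logic) could look like this:

// Hedged sketch of pulling the AVC stream out of the started encoder.
void pullAvcStream(const sp<MediaSource> &avcEncoder) {
    MediaBuffer *buf;
    int bufferCount = 0;
    while (avcEncoder->read(&buf) == OK) {
        const uint8_t *nal = (const uint8_t *)buf->data() + buf->range_offset();
        size_t nalSize = buf->range_length();
        if (bufferCount < 2) {
            // Buffer 0 = SPS, buffer 1 = PPS (per the start() comment above):
            // stash them as codec-specific data for the container.
        } else {
            // One encoded access unit: write nal/nalSize to the muxer.
        }
        ++bufferCount;
        buf->release();  // hand the buffer back to the encoder's buffer group
    }
}
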
Further on, the actual encoding process is carried out by the PV AVC encoder library:
frameworks/base/media/libstagefright/codecs/avc/enc/src/avcenc_api.cpp
/* ======================================================================== */
/* Function : PVAVCEncInitialize() */
/* Date : 3/18/2004 */
/* Purpose : Initialize the encoder library, allocate memory and verify */
/* the profile/level support/settings. */
/* In/out : Encoding parameters. */
/* Return : AVCENC_SUCCESS for success. */
/* Modified : */
/* ======================================================================== */
OSCL_EXPORT_REF AVCEnc_Status PVAVCEncInitialize(AVCHandle *avcHandle, AVCEncParams *encParam,
        void* extSPS, void* extPPS)
{
    AVCEnc_Status status;
    AVCEncObject *encvid;
    AVCCommonObj *video;
    uint32 *userData = (uint32*) avcHandle->userData;
    int framesize;

    if (avcHandle->AVCObject != NULL)
    {
        return AVCENC_ALREADY_INITIALIZED; /* It's already initialized, need to cleanup first */
    }

    /* not initialized */

    /* allocate videoObject */
    avcHandle->AVCObject = (void*)avcHandle->CBAVC_Malloc(userData, sizeof(AVCEncObject), DEFAULT_ATTR);
    if (avcHandle->AVCObject == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }

    encvid = (AVCEncObject*) avcHandle->AVCObject;
    memset(encvid, 0, sizeof(AVCEncObject)); /* reset everything */

    encvid->enc_state = AVCEnc_Initializing;

    encvid->avcHandle = avcHandle;

    encvid->common = (AVCCommonObj*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCCommonObj), DEFAULT_ATTR);
    if (encvid->common == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }

    video = encvid->common;
    memset(video, 0, sizeof(AVCCommonObj));

    /* allocate bitstream structure */
    encvid->bitstream = (AVCEncBitstream*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCEncBitstream), DEFAULT_ATTR);
    if (encvid->bitstream == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }
    encvid->bitstream->encvid = encvid; /* to point back for reallocation */

    /* allocate sequence parameter set structure */
    video->currSeqParams = (AVCSeqParamSet*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCSeqParamSet), DEFAULT_ATTR);
    if (video->currSeqParams == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }
    memset(video->currSeqParams, 0, sizeof(AVCSeqParamSet));

    /* allocate picture parameter set structure */
    video->currPicParams = (AVCPicParamSet*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCPicParamSet), DEFAULT_ATTR);
    if (video->currPicParams == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }
    memset(video->currPicParams, 0, sizeof(AVCPicParamSet));

    /* allocate slice header structure */
    video->sliceHdr = (AVCSliceHeader*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCSliceHeader), DEFAULT_ATTR);
    if (video->sliceHdr == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }
    memset(video->sliceHdr, 0, sizeof(AVCSliceHeader));

    /* allocate encoded picture buffer structure*/
    video->decPicBuf = (AVCDecPicBuffer*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCDecPicBuffer), DEFAULT_ATTR);
    if (video->decPicBuf == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }
    memset(video->decPicBuf, 0, sizeof(AVCDecPicBuffer));

    /* allocate rate control structure */
    encvid->rateCtrl = (AVCRateControl*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCRateControl), DEFAULT_ATTR);
    if (encvid->rateCtrl == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }
    memset(encvid->rateCtrl, 0, sizeof(AVCRateControl));

    /* reset frame list, not really needed */
    video->currPic = NULL;
    video->currFS = NULL;
    encvid->currInput = NULL;
    video->prevRefPic = NULL;

    /* now read encParams, and allocate dimension-dependent variables */
    /* such as mblock */
    status = SetEncodeParam(avcHandle, encParam, extSPS, extPPS); /* initialized variables to be used in SPS*/
    if (status != AVCENC_SUCCESS)
    {
        return status;
    }

    if (encParam->use_overrun_buffer == AVC_ON)
    {
        /* allocate overrun buffer */
        encvid->oBSize = encvid->rateCtrl->cpbSize;
        if (encvid->oBSize > DEFAULT_OVERRUN_BUFFER_SIZE)
        {
            encvid->oBSize = DEFAULT_OVERRUN_BUFFER_SIZE;
        }
        encvid->overrunBuffer = (uint8*) avcHandle->CBAVC_Malloc(userData, encvid->oBSize, DEFAULT_ATTR);
        if (encvid->overrunBuffer == NULL)
        {
            return AVCENC_MEMORY_FAIL;
        }
    }
    else
    {
        encvid->oBSize = 0;
        encvid->overrunBuffer = NULL;
    }

    /* allocate frame size dependent structures */
    framesize = video->FrameHeightInMbs * video->PicWidthInMbs;

    video->mblock = (AVCMacroblock*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCMacroblock) * framesize, DEFAULT_ATTR);
    if (video->mblock == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }

    video->MbToSliceGroupMap = (int*) avcHandle->CBAVC_Malloc(userData, sizeof(uint) * video->PicSizeInMapUnits * 2, DEFAULT_ATTR);
    if (video->MbToSliceGroupMap == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }

    encvid->mot16x16 = (AVCMV*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCMV) * framesize, DEFAULT_ATTR);
    if (encvid->mot16x16 == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }
    memset(encvid->mot16x16, 0, sizeof(AVCMV)*framesize);

    encvid->intraSearch = (uint8*) avcHandle->CBAVC_Malloc(userData, sizeof(uint8) * framesize, DEFAULT_ATTR);
    if (encvid->intraSearch == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }

    encvid->min_cost = (int*) avcHandle->CBAVC_Malloc(userData, sizeof(int) * framesize, DEFAULT_ATTR);
    if (encvid->min_cost == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }

    /* initialize motion search related memory */
    if (AVCENC_SUCCESS != InitMotionSearchModule(avcHandle))
    {
        return AVCENC_MEMORY_FAIL;
    }

    if (AVCENC_SUCCESS != InitRateControlModule(avcHandle))
    {
        return AVCENC_MEMORY_FAIL;
    }

    /* initialize function pointers */
    encvid->functionPointer = (AVCEncFuncPtr*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCEncFuncPtr), DEFAULT_ATTR);
    if (encvid->functionPointer == NULL)
    {
        return AVCENC_MEMORY_FAIL;
    }
    encvid->functionPointer->SAD_Macroblock = &AVCSAD_Macroblock_C;
    encvid->functionPointer->SAD_MB_HalfPel[0] = NULL;
    encvid->functionPointer->SAD_MB_HalfPel[1] = &AVCSAD_MB_HalfPel_Cxh;
    encvid->functionPointer->SAD_MB_HalfPel[2] = &AVCSAD_MB_HalfPel_Cyh;
    encvid->functionPointer->SAD_MB_HalfPel[3] = &AVCSAD_MB_HalfPel_Cxhyh;

    /* initialize timing control */
    encvid->modTimeRef = 0; /* ALWAYS ASSUME THAT TIMESTAMP START FROM 0 !!!*/
    video->prevFrameNum = 0;
    encvid->prevCodedFrameNum = 0;
    encvid->dispOrdPOCRef = 0;

    if (encvid->outOfBandParamSet == TRUE)
    {
        encvid->enc_state = AVCEnc_Encoding_SPS;
    }
    else
    {
        encvid->enc_state = AVCEnc_Analyzing_Frame;
    }

    return AVCENC_SUCCESS;
}

