MediaMuxer Source Code Analysis

1. Introduction:

MediaMuxer is Android's audio/video muxer: it combines a video track and an audio track into a single output file. On Android KitKat the application layer currently only supports muxing into MP4; producing TS files (or TS packets) requires modifying the stagefright sources. This article walks through the MediaMuxer source code.
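Before diving into the sources, here is a minimal sketch of the native-level call sequence that the header documents (Constructor -> addTrack+ -> start -> writeSampleData+ -> stop). The format objects, the sample buffer, and the helper function name are placeholders supplied by the caller, not code taken from the article.

// Minimal usage sketch of the native MediaMuxer API; videoFormat/audioFormat,
// esBuffer and timeUs are assumed to come from the encoders (e.g. MediaCodec).
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

static void muxOneFile(const sp<AMessage> &videoFormat,
                       const sp<AMessage> &audioFormat,
                       const sp<ABuffer> &esBuffer, int64_t timeUs) {
    sp<MediaMuxer> muxer = new MediaMuxer("/sdcard/muxeroutput.mp4",
                                          MediaMuxer::OUTPUT_FORMAT_MPEG_4);
    ssize_t videoTrack = muxer->addTrack(videoFormat);   // must precede start()
    ssize_t audioTrack = muxer->addTrack(audioFormat);
    muxer->start();
    // Feed every encoded access unit to its track; only one video sample is
    // shown here for brevity.
    muxer->writeSampleData(esBuffer, videoTrack, timeUs,
                           MediaCodec::BUFFER_FLAG_SYNCFRAME);
    (void)audioTrack;
    muxer->stop();   // blocking call; finalizes the moov box
}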

2. Source walkthrough:

MediaMuxer.h source:

/*
 * Copyright 2013, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MEDIA_MUXER_H_
#define MEDIA_MUXER_H_

#include <utils/Errors.h>
#include <utils/RefBase.h>
#include <utils/Vector.h>
#include <utils/threads.h>

namespace android {

struct ABuffer;
struct AMessage;
struct MediaAdapter;
struct MediaBuffer;
struct MediaSource;
struct MetaData;
struct MPEG4Writer;

// MediaMuxer is used to mux multiple tracks into a video. Currently, we only
// support a mp4 file as the output.
// The expected calling order of the functions is:
// Constructor -> addTrack+ -> start -> writeSampleData+ -> stop
// If muxing operation need to be cancelled, the app is responsible for
// deleting the output file after stop.
struct MediaMuxer : public RefBase {
public:
    // Please update media/java/android/media/MediaMuxer.java if the
    // OutputFormat is updated.
    enum OutputFormat {
        OUTPUT_FORMAT_MPEG_4 = 0,
        OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
    };

    // Construct the muxer with the output file path.
    MediaMuxer(const char *path, OutputFormat format);

    // Construct the muxer with the file descriptor. Note that the MediaMuxer
    // will close this file at stop().
    MediaMuxer(int fd, OutputFormat format);

    virtual ~MediaMuxer();

    /**
     * Add a track with its format information. This should be
     * called before start().
     * @param format the track's format.
     * @return the track's index or negative number if error.
     */
    ssize_t addTrack(const sp<AMessage> &format);

    /**
     * Start muxing. Make sure all the tracks have been added before
     * calling this.
     */
    status_t start();

    /**
     * Set the orientation hint.
     * @param degrees The rotation degrees. It has to be either 0,
     *                90, 180 or 270.
     * @return OK if no error.
     */
    status_t setOrientationHint(int degrees);

    /**
     * Set the location.
     * @param latitude The latitude in degree x 1000. Its value must be in the range
     * [-900000, 900000].
     * @param longitude The longitude in degree x 1000. Its value must be in the range
     * [-1800000, 1800000].
     * @return OK if no error.
     */
    status_t setLocation(int latitude, int longitude);

    /**
     * Stop muxing.
     * This method is a blocking call. Depending on how
     * much data is bufferred internally, the time needed for stopping
     * the muxer may be time consuming. UI thread is
     * not recommended for launching this call.
     * @return OK if no error.
     */
    status_t stop();

    /**
     * Send a sample buffer for muxing.
     * The buffer can be reused once this method returns. Typically,
     * this function won't be blocked for very long, and thus there
     * is no need to use a separate thread calling this method to
     * push a buffer.
     * @param buffer the incoming sample buffer.
     * @param trackIndex the buffer's track index number.
     * @param timeUs the buffer's time stamp.
     * @param flags the only supported flag for now is
     *              MediaCodec::BUFFER_FLAG_SYNCFRAME.
     * @return OK if no error.
     */
    status_t writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                             int64_t timeUs, uint32_t flags);

private:
    sp<MPEG4Writer> mWriter;
    Vector< sp<MediaAdapter> > mTrackList;  // Each track has its MediaAdapter.
    sp<MetaData> mFileMeta;  // Metadata for the whole file.

    Mutex mMuxerLock;

    enum State {
        UNINITIALIZED,
        INITIALIZED,
        STARTED,
        STOPPED
    };
    State mState;

    DISALLOW_EVIL_CONSTRUCTORS(MediaMuxer);
};

}  // namespace android

#endif  // MEDIA_MUXER_H_

MediaMuxer does its file writing through an MPEG4Writer. From MPEG4Writer.h below you can see that MPEG4Writer inherits from MediaWriter:

/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MPEG4_WRITER_H_

#define MPEG4_WRITER_H_

#include <stdio.h>

#include <media/stagefright/MediaWriter.h>
#include <utils/List.h>
#include <utils/threads.h>

namespace android {

class MediaBuffer;
class MediaSource;
class MetaData;

class MPEG4Writer : public MediaWriter {
public:
    MPEG4Writer(const char *filename);
    MPEG4Writer(int fd);

    // Limitations
    // 1. No more than 2 tracks can be added
    // 2. Only video or audio source can be added
    // 3. No more than one video and/or one audio source can be added.
    virtual status_t addSource(const sp<MediaSource> &source);

    // Returns INVALID_OPERATION if there is no source or track.
    virtual status_t start(MetaData *param = NULL);
    virtual status_t stop() { return reset(); }
    virtual status_t pause();
    virtual bool reachedEOS();
    virtual status_t dump(int fd, const Vector<String16>& args);

    void beginBox(const char *fourcc);
    void writeInt8(int8_t x);
    void writeInt16(int16_t x);
    void writeInt32(int32_t x);
    void writeInt64(int64_t x);
    void writeCString(const char *s);
    void writeFourcc(const char *fourcc);
    void write(const void *data, size_t size);
    inline size_t write(const void *ptr, size_t size, size_t nmemb);
    void endBox();
    uint32_t interleaveDuration() const { return mInterleaveDurationUs; }
    status_t setInterleaveDuration(uint32_t duration);
    int32_t getTimeScale() const { return mTimeScale; }

    status_t setGeoData(int latitudex10000, int longitudex10000);
    void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
    int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }

protected:
    virtual ~MPEG4Writer();

private:
    class Track;

    int  mFd;
    status_t mInitCheck;
    bool mIsRealTimeRecording;
    bool mUse4ByteNalLength;
    bool mUse32BitOffset;
    bool mIsFileSizeLimitExplicitlyRequested;
    bool mPaused;
    bool mStarted;  // Writer thread + track threads started successfully
    bool mWriterThreadStarted;  // Only writer thread started successfully
    off64_t mOffset;
    off_t mMdatOffset;
    uint8_t *mMoovBoxBuffer;
    off64_t mMoovBoxBufferOffset;
    bool  mWriteMoovBoxToMemory;
    off64_t mFreeBoxOffset;
    bool mStreamableFile;
    off64_t mEstimatedMoovBoxSize;
    uint32_t mInterleaveDurationUs;
    int32_t mTimeScale;
    int64_t mStartTimestampUs;
    int mLatitudex10000;
    int mLongitudex10000;
    bool mAreGeoTagsAvailable;
    int32_t mStartTimeOffsetMs;

    Mutex mLock;

    List<Track *> mTracks;

    List<off64_t> mBoxes;

    void setStartTimestampUs(int64_t timeUs);
    int64_t getStartTimestampUs();  // Not const
    status_t startTracks(MetaData *params);
    size_t numTracks();
    int64_t estimateMoovBoxSize(int32_t bitRate);

    struct Chunk {
        Track               *mTrack;        // Owner
        int64_t             mTimeStampUs;   // Timestamp of the 1st sample
        List<MediaBuffer *> mSamples;   // Sample data

        // Convenient constructor
        Chunk(): mTrack(NULL), mTimeStampUs(0) {}

        Chunk(Track *track, int64_t timeUs, List<MediaBuffer *> samples)
            : mTrack(track), mTimeStampUs(timeUs), mSamples(samples) {
        }

    };
    struct ChunkInfo {
        Track               *mTrack;        // Owner
        List<Chunk>         mChunks;    // Remaining chunks to be written

        // Previous chunk timestamp that has been written
        int64_t mPrevChunkTimestampUs;

        // Max time interval between neighboring chunks
        int64_t mMaxInterChunkDurUs;

    };

    bool            mIsFirstChunk;
    volatile bool   mDone;                  // Writer thread is done?
    pthread_t       mThread;                // Thread id for the writer
    List<ChunkInfo> mChunkInfos;        // Chunk infos
    Condition       mChunkReadyCondition;   // Signal that chunks are available

    // Writer thread handling
    status_t startWriterThread();
    void stopWriterThread();
    static void *ThreadWrapper(void *me);
    void threadFunc();

    // Buffer a single chunk to be written out later.
    void bufferChunk(const Chunk& chunk);

    // Write all buffered chunks from all tracks
    void writeAllChunks();

    // Retrieve the proper chunk to write if there is one
    // Return true if a chunk is found; otherwise, return false.
    bool findChunkToWrite(Chunk *chunk);

    // Actually write the given chunk to the file.
    void writeChunkToFile(Chunk* chunk);

    // Adjust other track media clock (presumably wall clock)
    // based on audio track media clock with the drift time.
    int64_t mDriftTimeUs;
    void setDriftTimeUs(int64_t driftTimeUs);
    int64_t getDriftTimeUs();

    // Return whether the nal length is 4 bytes or 2 bytes
    // Only makes sense for H.264/AVC
    bool useNalLengthFour();

    // Return whether the writer is used for real time recording.
    // In real time recording mode, new samples will be allowed to buffered into
    // chunks in higher priority thread, even though the file writer has not
    // drained the chunks yet.
    // By default, real time recording is on.
    bool isRealTimeRecording() const;

    void lock();
    void unlock();

    // Acquire lock before calling these methods
    off64_t addSample_l(MediaBuffer *buffer);
    off64_t addLengthPrefixedSample_l(MediaBuffer *buffer);

    bool exceedsFileSizeLimit();
    bool use32BitFileOffset() const;
    bool exceedsFileDurationLimit();
    bool isFileStreamable() const;
    void trackProgressStatus(size_t trackId, int64_t timeUs, status_t err = OK);
    void writeCompositionMatrix(int32_t degrees);
    void writeMvhdBox(int64_t durationUs);
    void writeMoovBox(int64_t durationUs);
    void writeFtypBox(MetaData *param);
    void writeUdtaBox();
    void writeGeoDataBox();
    void writeLatitude(int degreex10000);
    void writeLongitude(int degreex10000);
    void sendSessionSummary();
    void release();
    status_t reset();

    static uint32_t getMpeg4Time();

    MPEG4Writer(const MPEG4Writer &);
    MPEG4Writer &operator=(const MPEG4Writer &);
};

}  // namespace android

#endif  // MPEG4_WRITER_H_

MediaWriter.h source:

/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MEDIA_WRITER_H_

#define MEDIA_WRITER_H_

#include <utils/RefBase.h>
#include <media/IMediaRecorderClient.h>

namespace android {

struct MediaSource;
struct MetaData;

struct MediaWriter : public RefBase {
    MediaWriter()
        : mMaxFileSizeLimitBytes(0),
          mMaxFileDurationLimitUs(0) {
    }

    virtual status_t addSource(const sp<MediaSource> &source) = 0;
    virtual bool reachedEOS() = 0;
    virtual status_t start(MetaData *params = NULL) = 0;
    virtual status_t stop() = 0;
    virtual status_t pause() = 0;

    virtual void setMaxFileSize(int64_t bytes) { mMaxFileSizeLimitBytes = bytes; }
    virtual void setMaxFileDuration(int64_t durationUs) { mMaxFileDurationLimitUs = durationUs; }
    virtual void setListener(const sp<IMediaRecorderClient>& listener) {
        mListener = listener;
    }

    virtual status_t dump(int fd, const Vector<String16>& args) {
        return OK;
    }

protected:
    virtual ~MediaWriter() {}
    int64_t mMaxFileSizeLimitBytes;
    int64_t mMaxFileDurationLimitUs;
    sp<IMediaRecorderClient> mListener;

    void notify(int msg, int ext1, int ext2) {
        if (mListener != NULL) {
            mListener->notify(msg, ext1, ext2);
        }
    }
private:
    MediaWriter(const MediaWriter &);
    MediaWriter &operator=(const MediaWriter &);
};

}  // namespace android

#endif  // MEDIA_WRITER_H_

Now let's go back and look at the MediaMuxer.cpp source:

/*
 * Copyright 2013, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaMuxer"
#include <utils/Log.h>

#include <media/stagefright/MediaMuxer.h>

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaAdapter.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/Utils.h>

namespace android {

MediaMuxer::MediaMuxer(const char *path, OutputFormat format)
    : mState(UNINITIALIZED) {
    if (format == OUTPUT_FORMAT_MPEG_4) {
        mWriter = new MPEG4Writer(path);
        mFileMeta = new MetaData;
        mState = INITIALIZED;
    }

}

MediaMuxer::MediaMuxer(int fd, OutputFormat format)
    : mState(UNINITIALIZED) {
    if (format == OUTPUT_FORMAT_MPEG_4) {
        mWriter = new MPEG4Writer(fd);
        mFileMeta = new MetaData;
        mState = INITIALIZED;
    }
}

MediaMuxer::~MediaMuxer() {
    Mutex::Autolock autoLock(mMuxerLock);

    // Clean up all the internal resources.
    mFileMeta.clear();
    mWriter.clear();
    mTrackList.clear();
}

ssize_t MediaMuxer::addTrack(const sp<AMessage> &format) {
    Mutex::Autolock autoLock(mMuxerLock);

    if (format.get() == NULL) {
        ALOGE("addTrack() get a null format");
        return -EINVAL;
    }

    if (mState != INITIALIZED) {
        ALOGE("addTrack() must be called after constructor and before start().");
        return INVALID_OPERATION;
    }

    sp<MetaData> trackMeta = new MetaData;
    convertMessageToMetaData(format, trackMeta);

    sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
    status_t result = mWriter->addSource(newTrack);
    if (result == OK) {
        return mTrackList.add(newTrack);
    }
    return -1;
}

status_t MediaMuxer::setOrientationHint(int degrees) {
    Mutex::Autolock autoLock(mMuxerLock);
    if (mState != INITIALIZED) {
        ALOGE("setOrientationHint() must be called before start().");
        return INVALID_OPERATION;
    }

    if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) {
        ALOGE("setOrientationHint() get invalid degrees");
        return -EINVAL;
    }

    mFileMeta->setInt32(kKeyRotation, degrees);
    return OK;
}

status_t MediaMuxer::setLocation(int latitude, int longitude) {
    Mutex::Autolock autoLock(mMuxerLock);
    if (mState != INITIALIZED) {
        ALOGE("setLocation() must be called before start().");
        return INVALID_OPERATION;
    }
    ALOGV("Setting location: latitude = %d, longitude = %d", latitude, longitude);
    return mWriter->setGeoData(latitude, longitude);
}

status_t MediaMuxer::start() {
    Mutex::Autolock autoLock(mMuxerLock);
    if (mState == INITIALIZED) {
        mState = STARTED;
        mFileMeta->setInt32(kKeyRealTimeRecording, false);
        return mWriter->start(mFileMeta.get());
    } else {
        ALOGE("start() is called in invalid state %d", mState);
        return INVALID_OPERATION;
    }
}

status_t MediaMuxer::stop() {
    Mutex::Autolock autoLock(mMuxerLock);

    if (mState == STARTED) {
        mState = STOPPED;
        for (size_t i = 0; i < mTrackList.size(); i++) {
            if (mTrackList[i]->stop() != OK) {
                return INVALID_OPERATION;
            }
        }
        return mWriter->stop();
    } else {
        ALOGE("stop() is called in invalid state %d", mState);
        return INVALID_OPERATION;
    }
}

status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                                     int64_t timeUs, uint32_t flags) {
    Mutex::Autolock autoLock(mMuxerLock);

    if (buffer.get() == NULL) {
        ALOGE("WriteSampleData() get an NULL buffer.");
        return -EINVAL;
    }

    if (mState != STARTED) {
        ALOGE("WriteSampleData() is called in invalid state %d", mState);
        return INVALID_OPERATION;
    }

    if (trackIndex >= mTrackList.size()) {
        ALOGE("WriteSampleData() get an invalid index %d", trackIndex);
        return -EINVAL;
    }

    MediaBuffer* mediaBuffer = new MediaBuffer(buffer);

    mediaBuffer->add_ref(); // Released in MediaAdapter::signalBufferReturned().
    mediaBuffer->set_range(buffer->offset(), buffer->size());

    sp<MetaData> sampleMetaData = mediaBuffer->meta_data();
    sampleMetaData->setInt64(kKeyTime, timeUs);
    // Just set the kKeyDecodingTime as the presentation time for now.
    sampleMetaData->setInt64(kKeyDecodingTime, timeUs);

    if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
        sampleMetaData->setInt32(kKeyIsSyncFrame, true);
    }

    sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
    // This pushBuffer will wait until the mediaBuffer is consumed.
    return currentTrack->pushBuffer(mediaBuffer);
}

}  // namespace android

A muxer is created in code like this:

    const char *outputFileName = "/sdcard/muxeroutput.mp4";
    sp<MediaMuxer> muxer = new MediaMuxer(outputFileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);

To use the muxer properly you must also register the formats of the audio/video tracks to be muxed, after construction and before start(), by calling:

MediaMuxer::addTrack(const sp<AMessage> &format)

In an actual project the code looks like this:

if (haveVideo) {
    ALOGV("muxer: adding video track.");
    outputVideoTrack = muxer->addTrack(encoderOutputVideoFormat);
}
if (haveAudio) {
    ALOGV("muxer: adding audio track.");
    outputAudioTrack = muxer->addTrack(encoderOutputAudioFormat);
}
ALOGV("muxer: starting");
muxer->start();
muxing = true;

From MediaMuxer.cpp you can see that addTrack() converts the track's format (an AMessage) into MetaData that the MediaWriter can consume, wraps it in a MediaAdapter, and appends that adapter to the mTrackList vector:

ssize_t MediaMuxer::addTrack(const sp<AMessage> &format) {
    Mutex::Autolock autoLock(mMuxerLock);

    if (format.get() == NULL) {
        ALOGE("addTrack() get a null format");
        return -EINVAL;
    }

    if (mState != INITIALIZED) {
        ALOGE("addTrack() must be called after constructor and before start().");
        return INVALID_OPERATION;
    }

    sp<MetaData> trackMeta = new MetaData;
    convertMessageToMetaData(format, trackMeta);

    sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
    status_t result = mWriter->addSource(newTrack);
    if (result == OK) {
        return mTrackList.add(newTrack);
    }
    return -1;
}

MediaWriter was already shown above, but what is MediaAdapter?

/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MEDIA_ADAPTER_H
#define MEDIA_ADAPTER_H

#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <utils/threads.h>

namespace android {

// Convert the MediaMuxer's push model into MPEG4Writer's pull model.
// Used only by the MediaMuxer for now.
struct MediaAdapter : public MediaSource, public MediaBufferObserver {
public:
    // MetaData is used to set the format and returned at getFormat.
    MediaAdapter(const sp<MetaData> &meta);
    virtual ~MediaAdapter();
    /////////////////////////////////////////////////
    // Inherited functions from MediaSource
    /////////////////////////////////////////////////

    virtual status_t start(MetaData *params = NULL);
    virtual status_t stop();
    virtual sp<MetaData> getFormat();
    virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options = NULL);

    /////////////////////////////////////////////////
    // Inherited functions from MediaBufferObserver
    /////////////////////////////////////////////////

    virtual void signalBufferReturned(MediaBuffer *buffer);

    /////////////////////////////////////////////////
    // Non-inherited functions:
    /////////////////////////////////////////////////

    // pushBuffer() will wait for the read() finish, and read() will have a
    // deep copy, such that after pushBuffer return, the buffer can be re-used.
    status_t pushBuffer(MediaBuffer *buffer);

private:
    Mutex mAdapterLock;
    // Make sure the read() wait for the incoming buffer.
    Condition mBufferReadCond;
    // Make sure the pushBuffer() wait for the current buffer consumed.
    Condition mBufferReturnedCond;

    MediaBuffer *mCurrentMediaBuffer;

    bool mStarted;
    sp<MetaData> mOutputFormat;

    DISALLOW_EVIL_CONSTRUCTORS(MediaAdapter);
};

}  // namespace android

#endif  // MEDIA_ADAPTER_H

Next, let's look at MediaSource:

/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MEDIA_SOURCE_H_

#define MEDIA_SOURCE_H_

#include <sys/types.h>

#include <media/stagefright/MediaErrors.h>
#include <utils/RefBase.h>
#include <utils/Vector.h>

namespace android {

class MediaBuffer;
class MetaData;

struct MediaSource : public virtual RefBase {
    MediaSource();

    // To be called before any other methods on this object, except
    // getFormat().
    virtual status_t start(MetaData *params = NULL) = 0;

    // Any blocking read call returns immediately with a result of NO_INIT.
    // It is an error to call any methods other than start after this call
    // returns. Any buffers the object may be holding onto at the time of
    // the stop() call are released.
    // Also, it is imperative that any buffers output by this object and
    // held onto by callers be released before a call to stop() !!!
    virtual status_t stop() = 0;

    // Returns the format of the data output by this media source.
    virtual sp<MetaData> getFormat() = 0;

    struct ReadOptions;

    // Returns a new buffer of data. Call blocks until a
    // buffer is available, an error is encountered of the end of the stream
    // is reached.
    // End of stream is signalled by a result of ERROR_END_OF_STREAM.
    // A result of INFO_FORMAT_CHANGED indicates that the format of this
    // MediaSource has changed mid-stream, the client can continue reading
    // but should be prepared for buffers of the new configuration.
    virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options = NULL) = 0;

    // Options that modify read() behaviour. The default is to
    // a) not request a seek
    // b) not be late, i.e. lateness_us = 0
    struct ReadOptions {
        enum SeekMode {
            SEEK_PREVIOUS_SYNC,
            SEEK_NEXT_SYNC,
            SEEK_CLOSEST_SYNC,
            SEEK_CLOSEST,
        };

        ReadOptions();

        // Reset everything back to defaults.
        void reset();

        void setSeekTo(int64_t time_us, SeekMode mode = SEEK_CLOSEST_SYNC);
        void clearSeekTo();
        bool getSeekTo(int64_t *time_us, SeekMode *mode) const;

        void setLateBy(int64_t lateness_us);
        int64_t getLateBy() const;

    private:
        enum Options {
            kSeekTo_Option      = 1,
        };

        uint32_t mOptions;
        int64_t mSeekTimeUs;
        SeekMode mSeekMode;
        int64_t mLatenessUs;
    };

    // Causes this source to suspend pulling data from its upstream source
    // until a subsequent read-with-seek. Currently only supported by
    // OMXCodec.
    virtual status_t pause() {
        return ERROR_UNSUPPORTED;
    }

    // The consumer of this media source requests that the given buffers
    // are to be returned exclusively in response to read calls.
    // This will be called after a successful start() and before the
    // first read() call.
    // Callee assumes ownership of the buffers if no error is returned.
    virtual status_t setBuffers(const Vector<MediaBuffer *> &buffers) {
        return ERROR_UNSUPPORTED;
    }

protected:
    virtual ~MediaSource();

private:
    MediaSource(const MediaSource &);
    MediaSource &operator=(const MediaSource &);
};

}  // namespace android

#endif  // MEDIA_SOURCE_H_

At this point it is clear that MediaSource is an abstract class derived from RefBase (so it is handled through sp<> strong pointers), and that MediaAdapter inherits from MediaSource. That is why MediaMuxer::addTrack() can hand the new MediaAdapter to MPEG4Writer::addSource():

status_t MPEG4Writer::addSource(const sp<MediaSource> &source) {
    Mutex::Autolock l(mLock);
    if (mStarted) {
        ALOGE("Attempt to add source AFTER recording is started");
        return UNKNOWN_ERROR;
    }

    // At most 2 tracks can be supported.
    if (mTracks.size() >= 2) {
        ALOGE("Too many tracks (%d) to add", mTracks.size());
        return ERROR_UNSUPPORTED;
    }

    CHECK(source.get() != NULL);

    // A track of type other than video or audio is not supported.
    const char *mime;
    source->getFormat()->findCString(kKeyMIMEType, &mime);
    bool isAudio = !strncasecmp(mime, "audio/", 6);
    bool isVideo = !strncasecmp(mime, "video/", 6);
    if (!isAudio && !isVideo) {
        ALOGE("Track (%s) other than video or audio is not supported",
            mime);
        return ERROR_UNSUPPORTED;
    }

    // At this point, we know the track to be added is either
    // video or audio. Thus, we only need to check whether it
    // is an audio track or not (if it is not, then it must be
    // a video track).

    // No more than one video or one audio track is supported.
    for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
        if ((*it)->isAudio() == isAudio) {
            ALOGE("%s track already exists", isAudio? "Audio": "Video");
            return ERROR_UNSUPPORTED;
        }
    }

    // This is the first track of either audio or video.
    // Go ahead to add the track.
    Track *track = new Track(this, source, 1 + mTracks.size());
    mTracks.push_back(track);

    return OK;
}

Once the tracks are configured, MediaMuxer::start() is handled as follows:

status_t MediaMuxer::start() {
    Mutex::Autolock autoLock(mMuxerLock);
    if (mState == INITIALIZED) {
        mState = STARTED;
        mFileMeta->setInt32(kKeyRealTimeRecording, false);
        return mWriter->start(mFileMeta.get());
    } else {
        ALOGE("start() is called in invalid state %d", mState);
        return INVALID_OPERATION;
    }
}
status_t MPEG4Writer::start(MetaData *param) {
    if (mInitCheck != OK) {
        return UNKNOWN_ERROR;
    }

    if (mMaxFileSizeLimitBytes != 0) {
        mIsFileSizeLimitExplicitlyRequested = true;
    }

    int32_t use64BitOffset;
    if (param &&
        param->findInt32(kKey64BitFileOffset, &use64BitOffset) &&
        use64BitOffset) {
        mUse32BitOffset = false;
    }

    if (mUse32BitOffset) {
        // Implicit 32 bit file size limit
        if (mMaxFileSizeLimitBytes == 0) {
            mMaxFileSizeLimitBytes = kMax32BitFileSize;
        }

        // If file size is set to be larger than the 32 bit file
        // size limit, treat it as an error.
        if (mMaxFileSizeLimitBytes > kMax32BitFileSize) {
            ALOGW("32-bit file size limit (%lld bytes) too big. "
                 "It is changed to %lld bytes",
                mMaxFileSizeLimitBytes, kMax32BitFileSize);
            mMaxFileSizeLimitBytes = kMax32BitFileSize;
        }
    }

    int32_t use2ByteNalLength;
    if (param &&
        param->findInt32(kKey2ByteNalLength, &use2ByteNalLength) &&
        use2ByteNalLength) {
        mUse4ByteNalLength = false;
    }

    int32_t isRealTimeRecording;
    if (param && param->findInt32(kKeyRealTimeRecording, &isRealTimeRecording)) {
        mIsRealTimeRecording = isRealTimeRecording;
    }

    mStartTimestampUs = -1;

    if (mStarted) {
        if (mPaused) {
            mPaused = false;
            return startTracks(param);
        }
        return OK;
    }

    if (!param ||
        !param->findInt32(kKeyTimeScale, &mTimeScale)) {
        mTimeScale = 1000;
    }
    CHECK_GT(mTimeScale, 0);
    ALOGV("movie time scale: %d", mTimeScale);

    /*
     * When the requested file size limit is small, the priority
     * is to meet the file size limit requirement, rather than
     * to make the file streamable. mStreamableFile does not tell
     * whether the actual recorded file is streamable or not.
     */
    mStreamableFile =
        (mMaxFileSizeLimitBytes != 0 &&
         mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes);

   
    mWriteMoovBoxToMemory = false;
    mMoovBoxBuffer = NULL;
    mMoovBoxBufferOffset = 0;

    writeFtypBox(param);

    mFreeBoxOffset = mOffset;

    if (mEstimatedMoovBoxSize == 0) {
        int32_t bitRate = -1;
        if (param) {
            param->findInt32(kKeyBitRate, &bitRate);
        }
        mEstimatedMoovBoxSize = estimateMoovBoxSize(bitRate);
    }
    CHECK_GE(mEstimatedMoovBoxSize, 8);
    if (mStreamableFile) {
        // Reserve a 'free' box only for streamable file
        lseek64(mFd, mFreeBoxOffset, SEEK_SET);
        writeInt32(mEstimatedMoovBoxSize);
        write("free", 4);
        mMdatOffset = mFreeBoxOffset + mEstimatedMoovBoxSize;
    } else {
        mMdatOffset = mOffset;
    }

    mOffset = mMdatOffset;
    lseek64(mFd, mMdatOffset, SEEK_SET);
    if (mUse32BitOffset) {
        write("????mdat", 8);
    } else {
        write("\x00\x00\x00\x01mdat????????", 16);
    }

    status_t err = startWriterThread();
    if (err != OK) {
        return err;
    }

    err = startTracks(param);
    if (err != OK) {
        return err;
    }

    mStarted = true;
    return OK;
}

status_t MPEG4Writer::startWriterThread() {
    ALOGV("startWriterThread");

    mDone = false;
    mIsFirstChunk = true;
    mDriftTimeUs = 0;
    for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
        ChunkInfo info;
        info.mTrack = *it;
        info.mPrevChunkTimestampUs = 0;
        info.mMaxInterChunkDurUs = 0;
        mChunkInfos.push_back(info);
    }

    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
    pthread_create(&mThread, &attr, ThreadWrapper, this);
    pthread_attr_destroy(&attr);
    mWriterThreadStarted = true;
    return OK;
}

During start(), the MPEG4Writer's writer thread is launched; a simplified sketch of what that thread does is shown below.
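The member and method names declared in MPEG4Writer.h above (mDone, mChunkReadyCondition, findChunkToWrite(), writeChunkToFile(), writeAllChunks()) suggest roughly the following loop. This is a simplified illustration for orientation, not the actual AOSP threadFunc() body.

// Simplified illustration (not the real implementation) of the writer
// thread's job: sleep until a track buffers a chunk, then append that
// chunk's samples to the mdat box.
void MPEG4Writer::threadFunc() {
    while (!mDone) {
        Chunk chunk;
        bool found = false;
        {
            Mutex::Autolock autoLock(mLock);
            while (!mDone && !(found = findChunkToWrite(&chunk))) {
                mChunkReadyCondition.wait(mLock);   // signalled by bufferChunk()
            }
        }
        if (found) {
            writeChunkToFile(&chunk);               // actual file I/O happens here
        }
    }
    writeAllChunks();                               // flush whatever is left at stop()
}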
In my project, encoded audio/video data is then written like this:

if(trackIndex == videoTrack){
  if(presentationTimeUs >= mLastVideoSampleTime){
    useOriTime = true;
  }
  if (size >= 0 && outputVideoTrack != -1) {
    if(useOriTime){
      mLastVideoSampleTime = presentationTimeUs;
      err = muxer->writeSampleData(buffer,outputVideoTrack,mLastVideoSampleTime, flags);
      CHECK_EQ(err, (status_t)OK);
    }else{
      ALOGV("%s encoder loss one buffer.",trackIndex==videoTrack?"video":"audio");
    }
                                
  }
}else if(trackIndex == audioTrack){
  if(presentationTimeUs >= mLastAudioSampleTime){
    useOriTime = true;
  }
  if (size >= 0 && outputAudioTrack != -1) {
    if(useOriTime){
      mLastAudioSampleTime = presentationTimeUs;
      err = muxer->writeSampleData(buffer,outputAudioTrack,mLastAudioSampleTime, flags);
      CHECK_EQ(err, (status_t)OK);
    }else{
      ALOGV("%s encoder loss one   buffer.",trackIndex==videoTrack?"video":"audio");
    }
  }
}

The encoded audio and video elementary streams are pushed in through muxer->writeSampleData().
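Internally, writeSampleData() wraps the ABuffer in a MediaBuffer and hands it to MediaAdapter::pushBuffer(), which blocks until the MPEG4Writer track thread has consumed the buffer through read(). A simplified sketch of that hand-off, based only on the members and comments of MediaAdapter.h quoted earlier (not the actual AOSP implementation), looks like this:

// Simplified sketch of the push-to-pull bridge inside MediaAdapter;
// mCurrentMediaBuffer, mBufferReadCond and mBufferReturnedCond are the
// members declared in MediaAdapter.h.
status_t MediaAdapter::pushBuffer(MediaBuffer *buffer) {
    Mutex::Autolock autoLock(mAdapterLock);
    mCurrentMediaBuffer = buffer;
    mBufferReadCond.signal();               // wake a read() blocked in the track thread
    // Block until signalBufferReturned() reports the writer has consumed the
    // buffer, so the caller of writeSampleData() may reuse its buffer afterwards.
    mBufferReturnedCond.wait(mAdapterLock);
    return OK;
}

status_t MediaAdapter::read(MediaBuffer **buffer, const ReadOptions * /*options*/) {
    Mutex::Autolock autoLock(mAdapterLock);
    while (mStarted && mCurrentMediaBuffer == NULL) {
        mBufferReadCond.wait(mAdapterLock); // wait for the next pushBuffer()
    }
    *buffer = mCurrentMediaBuffer;          // handed to the MPEG4Writer track
    mCurrentMediaBuffer = NULL;
    return OK;
}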

3. Conclusion

On Android KitKat the application layer only supports muxing into MP4. To support TS or other container formats, the MediaMuxer module in libstagefright can be modified; based on the walkthrough above, only a few lines need to change to wire in TS muxing (a rough sketch of the idea follows below). That concludes this analysis; thanks for reading!
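As a closing illustration: libstagefright also contains an MPEG2TSWriter that, like MPEG4Writer, derives from MediaWriter, so one plausible change is sketched below. This is an untested sketch; the new enum value, the widened mWriter type, and the MPEG2TSWriter constructor call are assumptions that must be checked against the actual sources.

// Hypothetical patch sketch only. Note that MediaMuxer's mWriter member
// would have to be widened from sp<MPEG4Writer> to sp<MediaWriter>, and any
// MPEG4Writer-specific calls (e.g. setGeoData) guarded accordingly.
enum OutputFormat {
    OUTPUT_FORMAT_MPEG_4 = 0,
    OUTPUT_FORMAT_MPEG2_TS,        // new: MPEG-2 transport stream output
    OUTPUT_FORMAT_LIST_END         // must be last - used to validate format type
};

MediaMuxer::MediaMuxer(int fd, OutputFormat format)
    : mState(UNINITIALIZED) {
    if (format == OUTPUT_FORMAT_MPEG_4) {
        mWriter = new MPEG4Writer(fd);
    } else if (format == OUTPUT_FORMAT_MPEG2_TS) {
        mWriter = new MPEG2TSWriter(fd);   // assumed constructor; verify in AOSP
    }
    if (mWriter != NULL) {
        mFileMeta = new MetaData;
        mState = INITIALIZED;
    }
}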
