[Audio]
1 AudioSource
1.1 AudioSource constructor
AudioSource::AudioSource(
        audio_source_t inputSource, const String16 &opPackageName,
        uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
        uid_t uid, pid_t pid)
1.2 dataCallback
status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
    int64_t timeUs, position, timeNs;
    ExtendedTimestamp ts;
    ExtendedTimestamp::Location location;
    const int32_t usPerSec = 1000000;

    if (mRecord->getTimestamp(&ts) == OK &&
            ts.getBestTimestamp(&position, &timeNs, ExtendedTimestamp::TIMEBASE_MONOTONIC,
            &location) == OK) {
        // Use audio timestamp.
        timeUs = timeNs / 1000 -
                (position - mNumFramesSkipped -
                mNumFramesReceived + mNumFramesLost) * usPerSec / mSampleRate;
    } else {
        // This should not happen in normal case.
        ALOGW("Failed to get audio timestamp, fallback to use systemclock");
        timeUs = systemTime() / 1000ll;
        // Estimate the real sampling time of the 1st sample in this buffer
        // from AudioRecord's latency. (Apply this adjustment first so that
        // the start time logic is not affected.)
        timeUs -= mRecord->latency() * 1000LL;
    }

    ALOGV("dataCallbackTimestamp: %" PRId64 " us", timeUs);
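On the success path, the capture time of the buffer's first sample is extrapolated backwards from the single (frame position, monotonic time) anchor returned by getTimestamp(), at the nominal sample rate. A minimal standalone sketch of that arithmetic (hypothetical names; the real code additionally folds in the skipped/received/lost frame counters):

// Sketch only, not AOSP code: extrapolate a frame's capture time from one
// (frame position, monotonic time) anchor pair at the sample rate.
#include <cstdint>
#include <cstdio>

int64_t frameTimeUs(int64_t anchorFrame, int64_t anchorTimeNs,
                    int64_t frameIndex, int32_t sampleRate) {
    // frames between the anchor and the frame of interest, converted to us
    return anchorTimeNs / 1000
            - (anchorFrame - frameIndex) * 1000000LL / sampleRate;
}

int main() {
    // anchor: frame 48000 was captured at t = 2.000000 s (monotonic);
    // the buffer of interest starts at frame 43200, sample rate 48 kHz,
    // so its first sample was captured 4800 frames = 100 ms earlier
    printf("%lld us\n", (long long)frameTimeUs(48000, 2000000000LL, 43200, 48000));
    // prints: 1900000 us
}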
1.3 read
status_t AudioSource::read(
        MediaBuffer **out, const ReadOptions * /* options */) {
    ...
    if (mSampleRate != mOutSampleRate) {
        timeUs *= (int64_t)mSampleRate / (int64_t)mOutSampleRate;
        buffer->meta_data()->setInt64(kKeyTime, timeUs);
    }

    *out = buffer;
    return OK;
}
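For slow motion, AudioSource captures at mSampleRate but advertises mOutSampleRate to the writer, and read() stretches the timestamps by the ratio so the audio lines up with the stretched video timeline. Note that (int64_t)mSampleRate / (int64_t)mOutSampleRate is integer division, so the scaling is only exact when the capture rate is an integral multiple of the output rate. A standalone worked example (not AOSP code):

#include <cstdint>
#include <cstdio>

int main() {
    const int64_t sampleRate = 192000;   // rate the audio was actually captured at
    const int64_t outSampleRate = 48000; // rate advertised to the writer
    int64_t timeUs = 250000;             // 250 ms into the capture

    // same scaling as AudioSource::read(): stretch timestamps by 4x,
    // so the audio plays back 4x slower alongside the slow-motion video
    timeUs *= sampleRate / outSampleRate;
    printf("%lld us\n", (long long)timeUs); // prints: 1000000 us
}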
[Video][Camera API 1.0]
2 CameraSource / CameraSourceTimeLapse (for time-lapse recording)
    mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
            mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid,
            videoSize, mFrameRate, mPreviewSurface,
            std::llround(1e6 / mCaptureFps));
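The last argument is the capture interval in microseconds derived from mCaptureFps. A quick standalone check of that conversion:

#include <cmath>
#include <cstdio>

int main() {
    for (double captureFps : {0.1, 1.0, 30.0}) {
        long long intervalUs = std::llround(1e6 / captureFps);
        printf("captureFps=%.1f -> one frame every %lld us\n",
               captureFps, intervalUs);
    }
    // captureFps=0.1 -> one frame every 10000000 us (time lapse: 1 frame / 10 s)
    // captureFps=1.0 -> one frame every 1000000 us
    // captureFps=30.0 -> one frame every 33333 us
}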
3 VideoEncoder (ACodec onFillBufferDone)
4 MediaCodecSource (MediaCodec::CB_OUTPUT_AVAILABLE)
5 Writer (source->read, buffer->meta_data, kKeyTime)
[Video][Camera API 2.0]
6 GraphicBufferSource
6.1 onFrameAvailable
void GraphicBufferSource::onFrameAvailable(const BufferItem& item __unused) {
    Mutex::Autolock autoLock(mMutex);

    ALOGV("onFrameAvailable: executing=%d available=%zu+%d",
            mExecuting, mAvailableBuffers.size(), mNumAvailableUnacquiredBuffers);
    ++mNumAvailableUnacquiredBuffers;

    // For BufferQueue we cannot acquire a buffer if we cannot immediately feed it to the codec
    // UNLESS we are discarding this buffer (acquiring and immediately releasing it), which makes
    // this an ugly logic.
    // NOTE: We could also rely on our debug counter but that is meant only as a debug counter.
    if (!areWeDiscardingAvailableBuffers_l() && mFreeCodecBuffers.empty()) {
        // we may not be allowed to acquire a possibly encodable buffer, so just note that
        // it is available
        ALOGV("onFrameAvailable: cannot acquire buffer right now, do it later");

        ++mRepeatLastFrameGeneration; // cancel any pending frame repeat
        return;
    }

    VideoBuffer buffer;
    status_t err = acquireBuffer_l(&buffer);
    if (err != OK) {
        ALOGE("onFrameAvailable: acquireBuffer returned err=%d", err);
    } else {
        onBufferAcquired_l(buffer);
    }
}
6.2 onBufferAcquired_l
void GraphicBufferSource::onBufferAcquired_l(const VideoBuffer &buffer) {
    if (mEndOfStreamSent) {
        // This should only be possible if a new buffer was queued after
        // EOS was signaled, i.e. the app is misbehaving.
        ALOGW("onFrameAvailable: EOS is sent, ignoring frame");
    } else if (mOMXNode == NULL || (mSuspended && mActionQueue.empty())) {
        // FIXME: if we are suspended but have a resume queued we will stop repeating the last
        // frame. Is that the desired behavior?
        ALOGV("onFrameAvailable: suspended, ignoring frame");
    } else {
        ++mRepeatLastFrameGeneration; // cancel any pending frame repeat
        mAvailableBuffers.push_back(buffer);
        if (mExecuting) {
            fillCodecBuffer_l();
        }
    }
}
6.3 fillCodecBuffer_l
bool GraphicBufferSource::fillCodecBuffer_l() {
    ...
    int64_t timeUs = item.mTimestampNs / 1000;
    if (mFrameDropper != NULL && mFrameDropper->shouldDrop(timeUs)) {
        ALOGV("skipping frame (%lld) to meet max framerate", static_cast<long long>(timeUs));
        // set err to OK so that the skipped frame can still be saved as the latest frame
        err = OK;
    } else {
        err = submitBuffer_l(item); // this takes shared ownership of the acquired buffer on success
    }
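mFrameDropper->shouldDrop() enforces the encoder's maximum frame rate by timestamp. A minimal sketch of the idea (hypothetical SimpleFrameDropper, not the AOSP FrameDropper, which also allows some tolerance around the interval): keep a frame only if at least one minimum interval has elapsed since the last kept frame.

#include <cstdint>
#include <cstdio>

class SimpleFrameDropper {
public:
    explicit SimpleFrameDropper(double maxFps)
        : mMinIntervalUs(static_cast<int64_t>(1e6 / maxFps)), mLastKeptUs(-1) {}

    bool shouldDrop(int64_t timeUs) {
        if (mLastKeptUs >= 0 && timeUs - mLastKeptUs < mMinIntervalUs) {
            return true;            // too soon after the last kept frame
        }
        mLastKeptUs = timeUs;       // keep this frame, advance the anchor
        return false;
    }

private:
    const int64_t mMinIntervalUs;
    int64_t mLastKeptUs;
};

int main() {
    SimpleFrameDropper dropper(30.0);        // cap at 30 fps
    for (int64_t t : {0LL, 10000LL, 34000LL, 50000LL, 70000LL}) {
        printf("t=%6lld us -> %s\n", (long long)t,
               dropper.shouldDrop(t) ? "drop" : "keep");
    }
    // t=     0 us -> keep
    // t= 10000 us -> drop   (only 10 ms since the last kept frame)
    // t= 34000 us -> keep   (>= 33333 us interval)
    // t= 50000 us -> drop
    // t= 70000 us -> keep
}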
6.4 submitBuffer_l
status_t GraphicBufferSource::submitBuffer_l(const VideoBuffer &item) {
    CHECK(!mFreeCodecBuffers.empty());
    IOMX::buffer_id codecBufferId = *mFreeCodecBuffers.begin();

    ALOGV("submitBuffer_l [slot=%d, bufferId=%d]", item.mBuffer->getSlot(), codecBufferId);

    int64_t codecTimeUs;
    if (!calculateCodecTimestamp_l(item.mTimestampNs, &codecTimeUs)) {
        return UNKNOWN_ERROR;
    }

    if ((android_dataspace)item.mDataspace != mLastDataspace) {
        onDataspaceChanged_l(
                item.mDataspace,
                (android_pixel_format)item.mBuffer->getGraphicBuffer()->format);
    }

    std::shared_ptr<AcquiredBuffer> buffer = item.mBuffer;
    // use a GraphicBuffer for now as OMXNodeInstance is using GraphicBuffers to hold references
    // and it requires this graphic buffer to be able to hold its reference
    // and thus we would need to create a new GraphicBuffer from an ANWBuffer separate from the
    // acquired GraphicBuffer.
    // TODO: this can be reworked globally to use ANWBuffer references
    sp<GraphicBuffer> graphicBuffer = buffer->getGraphicBuffer();
    status_t err = mOMXNode->emptyBuffer(
            codecBufferId, OMX_BUFFERFLAG_ENDOFFRAME, graphicBuffer, codecTimeUs,
            buffer->getAcquireFenceFd());
6.5 calculateCodecTimestamp_l
bool GraphicBufferSource::calculateCodecTimestamp_l(
        nsecs_t bufferTimeNs, int64_t *codecTimeUs) {
    int64_t timeUs = bufferTimeNs / 1000;
    timeUs += mInputBufferTimeOffsetUs;

    if (mCaptureFps > 0.
            && (mFps > 2 * mCaptureFps
            || mCaptureFps > 2 * mFps)) {
        // Time lapse or slow motion mode
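In this mode the Nth captured frame is effectively re-stamped as if frames arrived mFps apart, mapping capture-rate time onto playback-rate time (the real code also snaps wall-clock times to the nearest capture point and handles dropped frames). A standalone sketch of the net effect for a 1 fps time lapse played back at 30 fps:

#include <cstdint>
#include <cstdio>

int main() {
    const double captureFps = 1.0;  // time lapse: one capture per second
    const double fps = 30.0;        // playback rate requested from the codec

    for (int n = 0; n < 3; ++n) {
        int64_t captureUs = static_cast<int64_t>(n * 1e6 / captureFps);
        int64_t codecUs = static_cast<int64_t>(n * 1e6 / fps);
        printf("frame %d: captured at %8lld us -> stamped %6lld us\n",
               n, (long long)captureUs, (long long)codecUs);
    }
    // frame 0: captured at        0 us -> stamped      0 us
    // frame 1: captured at  1000000 us -> stamped  33333 us
    // frame 2: captured at  2000000 us -> stamped  66666 us
    // => one second of wall-clock time is compressed into one 30 fps frame slot
}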
The remaining flow is the same as in [Video][Camera API 1.0]:
VideoEncoder (ACodec onFillBufferDone)
MediaCodecSource (MediaCodec::CB_OUTPUT_AVAILABLE)
Writer (source->read, buffer->meta_data, kKeyTime)
[Code integration][StagefrightRecorder]
7 Parameter setup
status_t StagefrightRecorder::setParamCaptureFpsEnable(int32_t captureFpsEnable) {
    ALOGV("setParamCaptureFpsEnable: %d", captureFpsEnable);

    if (captureFpsEnable == 0) {
        mCaptureFpsEnable = false;
    } else if (captureFpsEnable == 1) {
        mCaptureFpsEnable = true;
    } else {
        return BAD_VALUE;
    }
    return OK;
}

status_t StagefrightRecorder::setParamCaptureFps(double fps) {
    ALOGV("setParamCaptureFps: %.2f", fps);

    if (!(fps >= 1.0 / 86400)) {
        ALOGE("FPS is too small");
        return BAD_VALUE;
    }
    mCaptureFps = fps;
    return OK;
}
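The lower bound 1.0 / 86400 corresponds to one captured frame per day (86400 seconds), the slowest time lapse the recorder accepts. A quick standalone check:

#include <cstdio>

int main() {
    const double minFps = 1.0 / 86400;  // 86400 seconds in a day
    printf("min fps = %.8f (one frame every %.0f s)\n", minFps, 1.0 / minFps);
    // min fps = 0.00001157 (one frame every 86400 s)
}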
8 Audio configuration
sp<MediaCodecSource> StagefrightRecorder::createAudioSource() {
    int32_t sourceSampleRate = mSampleRate;

    if (mCaptureFpsEnable && mCaptureFps >= mFrameRate) {
        // Upscale the sample rate for slow motion recording.
        // Fail audio source creation if source sample rate is too high, as it could
        // cause out-of-memory due to large input buffer size. And audio recording
        // probably doesn't make sense in the scenario, since the slow-down factor
        // is probably huge (eg. mSampleRate=48K, mCaptureFps=240, mFrameRate=1).
        const static int32_t SAMPLE_RATE_HZ_MAX = 192000;
        sourceSampleRate =
                (mSampleRate * mCaptureFps + mFrameRate / 2) / mFrameRate;
        if (sourceSampleRate < mSampleRate || sourceSampleRate > SAMPLE_RATE_HZ_MAX) {
            ALOGE("source sample rate out of range! "
                    "(mSampleRate %d, mCaptureFps %.2f, mFrameRate %d",
                    mSampleRate, mCaptureFps, mFrameRate);
            return NULL;
        }
    }

    sp<AudioSource> audioSource =
            new AudioSource(
                    mAudioSource,
                    mOpPackageName,
                    sourceSampleRate,
                    mAudioChannels,
                    mSampleRate,
                    mClientUid,
                    mClientPid);
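The upscaling computes mSampleRate * (mCaptureFps / mFrameRate), with mFrameRate / 2 added for round-to-nearest. A standalone worked example (not AOSP code): 4x slow motion at 48 kHz needs a 192 kHz source, which is exactly SAMPLE_RATE_HZ_MAX; anything faster makes createAudioSource() return NULL.

#include <cstdint>
#include <cstdio>

int main() {
    const int32_t sampleRate = 48000; // requested output rate
    const double captureFps = 120.0;  // capture rate
    const int32_t frameRate = 30;     // playback rate (4x slow motion)

    // same rounded scaling as createAudioSource()
    int32_t sourceSampleRate = static_cast<int32_t>(
            (sampleRate * captureFps + frameRate / 2) / frameRate);
    printf("capture audio at %d Hz\n", sourceSampleRate); // 192000 Hz
}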
9 Video configuration [Camera API 1.0]
status_t StagefrightRecorder::setupCameraSource(
        sp<CameraSource> *cameraSource) {
    status_t err = OK;
    if ((err = checkVideoEncoderCapabilities()) != OK) {
        return err;
    }
    Size videoSize;
    videoSize.width = mVideoWidth;
    videoSize.height = mVideoHeight;
    if (mCaptureFpsEnable) {
        if (!(mCaptureFps > 0.)) {
            ALOGE("Invalid mCaptureFps value: %lf", mCaptureFps);
            return BAD_VALUE;
        }

        mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
                mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid,
                videoSize, mFrameRate, mPreviewSurface,
                std::llround(1e6 / mCaptureFps));
        *cameraSource = mCameraSourceTimeLapse;
10 Video configuration [Camera API 2.0]
10.1 MediaCodecSource
    if (cameraSource != NULL) {
        sp<MetaData> meta = cameraSource->getFormat();

        int32_t width, height, stride, sliceHeight, colorFormat;
        CHECK(meta->findInt32(kKeyWidth, &width));
        CHECK(meta->findInt32(kKeyHeight, &height));
        CHECK(meta->findInt32(kKeyStride, &stride));
        CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));

        format->setInt32("width", width);
        format->setInt32("height", height);
        format->setInt32("stride", stride);
        format->setInt32("slice-height", sliceHeight);
        format->setInt32("color-format", colorFormat);
    } else {
        format->setInt32("width", mVideoWidth);
        format->setInt32("height", mVideoHeight);
        format->setInt32("stride", mVideoWidth);
        format->setInt32("slice-height", mVideoHeight);
        format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);

        // set up time lapse/slow motion for surface source
        if (mCaptureFpsEnable) {
            if (!(mCaptureFps > 0.)) {
                ALOGE("Invalid mCaptureFps value: %lf", mCaptureFps);
                return BAD_VALUE;
            }
            format->setDouble("time-lapse-fps", mCaptureFps);
        }
    }
10.2 MediaCodecSource constructor
MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        const sp<PersistentSurface> &persistentSurface,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
10.3 MediaCodecSource creates the VideoEncoder
status_t MediaCodecSource::initEncoder() {
    ...
    err = mEncoder->configure(
                mOutputFormat,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);
10.4 VideoEncoder configuration
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    ...
    if (!msg->findDouble("time-lapse-fps", &mCaptureFps)) {
        mCaptureFps = -1.0;
    }
10.5 GraphicBufferSource configuration
    if (mCodec->mCaptureFps > 0. && mCodec->mFps > 0.) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setTimeLapseConfig(
                        mCodec->mFps, mCodec->mCaptureFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                    mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
status_t GraphicBufferSource::setTimeLapseConfig(double fps, double captureFps) {
    ALOGV("setTimeLapseConfig: fps=%lg, captureFps=%lg",
            fps, captureFps);

    Mutex::Autolock autoLock(mMutex);

    if (mExecuting || !(fps > 0) || !(captureFps > 0)) {
        return INVALID_OPERATION;
    }

    mFps = fps;
    mCaptureFps = captureFps;

    return OK;
}
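setTimeLapseConfig() only records the pair; whether the special timestamp rescaling actually engages is decided per frame by the 2x condition in calculateCodecTimestamp_l() (section 6.5). A standalone illustration of how typical pairs are classified:

#include <cstdio>

int main() {
    const double pairs[][2] = {
        {30.0, 1.0},    // time lapse: 30x speed-up
        {30.0, 240.0},  // slow motion: 8x slow-down
        {30.0, 30.0},   // normal recording
    };
    for (const auto& p : pairs) {
        double fps = p[0], captureFps = p[1];
        // same predicate as calculateCodecTimestamp_l()
        bool rescale = captureFps > 0.
                && (fps > 2 * captureFps || captureFps > 2 * fps);
        printf("fps=%5.1f captureFps=%5.1f -> rescale=%d\n",
               fps, captureFps, rescale);
    }
    // fps= 30.0 captureFps=  1.0 -> rescale=1
    // fps= 30.0 captureFps=240.0 -> rescale=1
    // fps= 30.0 captureFps= 30.0 -> rescale=0
}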