https://github.com/saki4510t/UVCCamera
Web usb camera 开源项目
java 流程分析
以USB test8为例 MainActivity
case R.id.capture_button --> mCameraHandler.startRecording() --> thread.handleStartRecording();-->
handleStartRecording()...
AbstractUVCCameraHandler.java
public void handleStartRecording() {
if (DEBUG) Log.v(TAG_THREAD, "handleStartRecording:");
try {
if ((mUVCCamera == null) || (mMuxer != null)) return;
final MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK.
MediaVideoBufferEncoder videoEncoder = null;
Log.e(TAG, "chaolong handleStartRecording: mEncoderType = " + mEncoderType);
switch (mEncoderType) { //mEncoder = 1
case 1: // for video capturing using MediaVideoEncoder
new MediaVideoEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
break;
case 2: // for video capturing using MediaVideoBufferEncoder
videoEncoder = new MediaVideoBufferEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
break;
// case 0: // for video capturing using MediaSurfaceEncoder
default:
new MediaSurfaceEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
break;
}
muxer.prepare();
muxer.startRecording(); // 开始存储（开始录制、写入文件）
....
}
编码器类型（mEncoderType）是在 MainActivity 初始化时指定的，这里传入的是 1：
mCameraHandler = UVCCameraHandler.createHandler(this, mUVCCameraView,
USE_SURFACE_ENCODER ? 0 : 1, PREVIEW_WIDTH, PREVIEW_HEIGHT, PREVIEW_MODE);
所以继续分析 new MediaVideoEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
MediaEncoder 回调函数,在MediaVideoEncoder 构造函数传到基类中
AbstractUVCCameraHandler.java
private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
@Override
public void onPrepared(final MediaEncoder encoder) {
if (DEBUG) Log.v(TAG, "onPrepared:encoder=" + encoder);
mIsRecording = true;
if (encoder instanceof MediaVideoEncoder)
try {
mWeakCameraView.get().setVideoEncoder((MediaVideoEncoder)encoder);
} catch (final Exception e) {
Log.e(TAG, "onPrepared:", e);
}
if (encoder instanceof MediaSurfaceEncoder)
try {
mWeakCameraView.get().setVideoEncoder((MediaSurfaceEncoder)encoder);
mUVCCamera.startCapture(((MediaSurfaceEncoder)encoder).getInputSurface());
} catch (final Exception e) {
Log.e(TAG, "onPrepared:", e);
}
}
@Override
public void onStopped(final MediaEncoder encoder) {
// 。。。音频略过
通过打印 log 查看，这里只实例化了 MediaVideoEncoder。
}
// Video encoder constructor: registers itself with the muxer and stores the
// callback listener via the MediaEncoder super constructor (which calls
// muxer.addEncoder(this) — see the MediaEncoder constructor quoted below).
public MediaVideoEncoder(final MediaMuxerWrapper muxer, final int width, final int height, final MediaEncoderListener listener) {
super(muxer, listener);
if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
// Handler used for rendering frames into the codec's input.
// NOTE(review): presumably RenderHandler.createHandler spins up its own
// worker thread — confirm in RenderHandler.
mRenderHandler = RenderHandler.createHandler(TAG);
// Requested capture resolution, kept for configuring the codec later.
mWidth = width;
mHeight = height;
}
super(muxer, listener); 会将回调接口 listener 保存到基类 MediaEncoder 中
muxer.startRecording(); // 开始存储（开始录制、写入文件）
MediaMuxerWrapper.java
private MediaEncoder mVideoEncoder, mAudioEncoder;
// Fans the start request out to whichever encoders were registered via
// addEncoder(). Either field may be null (e.g. video-only or audio-only
// recording), so each is checked before use.
public void startRecording() {
if (mVideoEncoder != null)
mVideoEncoder.startRecording();
if (mAudioEncoder != null)
mAudioEncoder.startRecording();
}
mVideoEncoder在哪里赋值呢?
void addEncoder(final MediaEncoder encoder) {
if (encoder instanceof MediaVideoEncoder) {
if (mVideoEncoder != null)
throw new IllegalArgumentException("Video encoder already added.");
mVideoEncoder = encoder;
...
}
addEncoder 会把 MediaEncoder 基类对象传入 MediaMuxerWrapper 里面
在哪传的呢?
在MediaEncoder 构造函数里面,
public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
...
muxer.addEncoder(this);
....
存储最终会调用到 MediaEncoder.java
// Marks the encoder as capturing and wakes any thread blocked on mSync
// (e.g. the mSync.wait() in the MediaEncoder constructor quoted below).
void startRecording() {
if (DEBUG) Log.v(TAG, "startRecording");
synchronized (mSync) {
// Flip the state flags under the lock so the woken thread observes them.
mIsCapturing = true;
mRequestStop = false;
mSync.notifyAll();
}
}
为什么这里会有mSync.notifyAll();
因为MediaEncoder构造函数 构造完时会等待
public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
....
synchronized (mSync) {
try {
mSync.wait(); //等待
} catch (final InterruptedException e) {
}
......
}
@Override
public void run() {
// android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
synchronized (mSync) {
mRequestStop = false;
mRequestDrain = 0;
mSync.notify();
}
final boolean isRunning = true;
boolean localRequestStop;
boolean localRequestDrain;
while (isRunning) {
synchronized (mSync) {
localRequestStop = mRequestStop;
localRequestDrain = (mRequestDrain > 0);
if (localRequestDrain)
mRequestDrain--;
}
if (localRequestStop) {
drain();
// request stop recording
signalEndOfInputStream();
// process output data again for EOS signal
drain();
// release all related objects
release();
break;
}
..........
signalEndOfInputStream() 会向编码器发送流结束（EOS）信号，之后再次调用 drain() 把剩余的编码数据写出保存
protected void encode(final byte[] buffer, final int length, final long presentationTimeUs) {
//if (DEBUG) Log.v(TAG, "encode:buffer=" + buffer);
if (!mIsCapturing) return;
int ix = 0, sz;
final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
while (mIsCapturing && ix < length) {
final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufferIndex >= 0) {
final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
sz = inputBuffer.remaining();
sz = (ix + sz < length) ? sz : length - ix;
if (sz > 0 && (buffer != null)) {
inputBuffer.put(buffer, ix, sz);
}
ix += sz;
// if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
if (length <= 0) {
.....
}