Implementing screen sharing with WebRTC M66 on Android

Part 1: First, a look at the camera capture logic

1. Camera initialization

// Get the device name of the front camera
Camera1Enumerator enumerator = new Camera1Enumerator(false);
String[] names = enumerator.getDeviceNames();
String name = names[0];
for (String item : names) {
    if (enumerator.isFrontFacing(item)) {
        name = item;
        break;
    }
}
// Create the VideoCapturer object for that camera
mVideoCapturerAndroid = (Camera1Capturer) enumerator.createCapturer(name, null);
// Initialize the VideoCapturer (createVideoSource() below initializes it again with the factory's own observer, see section 2)
mVideoCapturerAndroid.initialize(mSurfaceTextureHelper, mApplicationContext, null);
// Create the VideoSource from the VideoCapturer
videoSource = mPeerFactory.createVideoSource(mVideoCapturerAndroid);
// Start video capture
mVideoCapturerAndroid.startCapture(mVideoWidth, mVideoHeight, mFps);
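
For completeness, the VideoSource is then usually wrapped into a VideoTrack and attached to a local renderer. A minimal sketch, assuming the same mPeerFactory plus an already-initialized SurfaceViewRenderer called mLocalRenderer (the track id "local_video" is just an example):

// Sketch only: turn the source into a track and render it locally.
VideoTrack localVideoTrack = mPeerFactory.createVideoTrack("local_video", videoSource);
localVideoTrack.setEnabled(true);
// M66 still ships the VideoRenderer wrapper; newer builds would call addSink(mLocalRenderer) instead.
localVideoTrack.addRenderer(new VideoRenderer(mLocalRenderer));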

2. How camera capture is implemented

  • Camera frames are captured with the Java-layer API and then passed down to the C++ layer through JNI. Let's first look at how this is done.
public interface VideoCapturer {
    void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, VideoCapturer.CapturerObserver capturerObserver);

    void startCapture(int width, int height, int framerate);

    void stopCapture() throws InterruptedException;

    void changeCaptureFormat(int width, int height, int framerate);

    void dispose();

    boolean isScreencast();

    public interface CapturerObserver {
        void onCapturerStarted(boolean success);

        void onCapturerStopped();

        /** @deprecated */
        @Deprecated
        default void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation, long timeStamp) {
            throw new UnsupportedOperationException("Deprecated and not implemented.");
        }

        /** @deprecated */
        @Deprecated
        default void onTextureFrameCaptured(int width, int height, int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
            throw new UnsupportedOperationException("Deprecated and not implemented.");
        }

        void onFrameCaptured(VideoFrame frame);
    }
}
  • As the code above shows, the VideoCapturer interface itself is simple. The important part is the nested CapturerObserver interface: Java-layer video frames are delivered to the C++ layer through it, mainly via onFrameCaptured(VideoFrame). Next, let's see where this observer gets set.
public VideoSource createVideoSource(VideoCapturer capturer) {
    org.webrtc.EglBase.Context eglContext = this.localEglbase == null ? null : this.localEglbase.getEglBaseContext();
    SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("VideoCapturerThread", eglContext);
    long nativeAndroidVideoTrackSource = nativeCreateVideoSource(this.nativeFactory, surfaceTextureHelper, capturer.isScreencast());
    CapturerObserver capturerObserver = new AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
    capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(), capturerObserver);
    return new VideoSource(nativeAndroidVideoTrackSource);
}

When the VideoSource is created, the factory builds an AndroidVideoTrackSourceObserver (a CapturerObserver backed by the native video track source) and hands it to the VideoCapturer through VideoCapturer.initialize().

Part 2: Screen sharing implementation

1. Implement the VideoCapturer interface. Keep a reference to the VideoCapturer.CapturerObserver passed in through initialize(SurfaceTextureHelper, Context, CapturerObserver), and deliver the captured screen frames to the C++ layer by calling onFrameCaptured(VideoFrame) on that observer.
2. Android 5.0 (Lollipop) and later provide a screen capture API, MediaProjection; the capturer built on top of it is shown below.
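
The MediaProjection that the capturer's constructor expects has to be obtained through a user-consent dialog first. A minimal sketch of that flow, assuming it is driven from an Activity (REQUEST_SCREEN_CAPTURE, mProjectionManager and mScreenCapturer are illustrative names, not from the original project):

// Sketch only: obtain the MediaProjection that ScreenRecorder's constructor expects.
private static final int REQUEST_SCREEN_CAPTURE = 1001; // illustrative request code
private MediaProjectionManager mProjectionManager;
private ScreenRecorder mScreenCapturer;

private void requestScreenCapture() {
    mProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_SCREEN_CAPTURE);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SCREEN_CAPTURE && resultCode == RESULT_OK && data != null) {
        MediaProjection projection = mProjectionManager.getMediaProjection(resultCode, data);
        int dpi = getResources().getDisplayMetrics().densityDpi;
        mScreenCapturer = new ScreenRecorder(dpi, projection); // pass this to the WebRTC wiring shown after the class
    }
}

With the MediaProjection in hand, the capturer implementation looks like this: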

public class ScreenRecorder implements VideoCapturer {
    private static final String TAG = "ScreenRecorder";
    private int mWidth;
    private int mHeight;
    private int mDpi;
    private int mFrameRate;

    private Handler cameraThreadHandler;
    private Context applicationContext;
    private CapturerObserver capturerObserver;
    private SurfaceTextureHelper surfaceHelper;
    private final Object stateLock = new Object();
    private final Object mMutex = new Object();
    private HandlerThread mHandlerThread;
    private VideoFrame mLastSendFrame;
    private long       mLastSendTSMs;

    private int mOrientation = -1;
    private MediaProjection mMediaProjection;
    // screen capture state
    private ImageReader mImgReader;
    private Handler mHandler;
    private AtomicBoolean mQuit = new AtomicBoolean(false);
    private VirtualDisplay mVirtualDisplay = null;
    private byte[] inputData = null;
    private byte[] bufferData = null;
    private byte[] outputData = null;

    public ScreenRecorder(int dpi, MediaProjection mp) {
        mDpi = dpi;
        mMediaProjection = mp;
        mHandlerThread = new HandlerThread("HandlerThread");
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void initImageReader(int width, int height, int frameRate) {
        mImgReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 3);
        mImgReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image img;
                try {
                    img = mImgReader.acquireLatestImage();
                    if (img != null) {
                        Image.Plane[] planes = img.getPlanes();
                        if (planes[0].getBuffer() == null) {
                            img.close(); // avoid leaking the Image on early return
                            return;
                        }
                        int width = mImgReader.getWidth();
                        int height = mImgReader.getHeight();
                        int pixelStride = planes[0].getPixelStride();
                        int rowStride = planes[0].getRowStride();
                        int rowPadding = rowStride - pixelStride * width;
                        ByteBuffer buffer = planes[0].getBuffer();
                        if (rowPadding>0){
                            // Portrait: each row carries padding, strip it before conversion
                            if (bufferData==null){
                                bufferData = new byte[buffer.capacity()];
                            }
                            buffer.get(bufferData);
                            SignalingChannel.ConvertABGRPlaneToData(bufferData,inputData,width,height,pixelStride,rowPadding);
                        }else{
                            // Landscape: rows are tightly packed, copy directly
                            buffer.get(inputData);
                        }
                        SignalingChannel.ConvertABGR8888ToYUV420SP(inputData,outputData,width,height);
                        long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
                        VideoFrame.Buffer frameBuffer = new NV21Buffer(outputData, width, height,null);
                        VideoFrame frame = new VideoFrame(frameBuffer, 0, captureTimeNs);
                        if (capturerObserver != null) {
                            capturerObserver.onFrameCaptured(frame);
                            mLastSendTSMs = System.currentTimeMillis();
                        }
                        frame.release();
                        if (getDeviceOrientation()!=mOrientation){
                            mOrientation = getDeviceOrientation();
                            mHandler.post(vhrun);
                        }
                        img.close();
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        },mHandler);
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void createVirtualDisplay(int width, int height) {
        mVirtualDisplay = mMediaProjection.createVirtualDisplay(TAG + "-display", width, height, mDpi,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, mImgReader.getSurface(),null,null);
    }

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, CapturerObserver capturerObserver) {
        this.applicationContext = applicationContext;
        this.capturerObserver = capturerObserver;
        this.surfaceHelper = surfaceTextureHelper;
        this.cameraThreadHandler = surfaceTextureHelper == null ? null : surfaceTextureHelper.getHandler();
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void startCapture(int width, int height, int framerate) {
        heavySet(width,height);
        LogManager.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
        if (this.applicationContext == null) {
            throw new RuntimeException("ScreenRecorder must be initialized before calling startCapture.");
        } else {
            synchronized(stateLock) {
                mFrameRate = framerate;
                bufferData = null;
                inputData = new byte[width*height*4];//ABGR
                outputData = new byte[width*height*3/2];//Yuv420sp
                initImageReader(mWidth, mHeight, framerate);
                createVirtualDisplay(mWidth, mHeight);
                if (capturerObserver!=null){
                    capturerObserver.onCapturerStarted(true);
                }
            }
        }
        mQuit.set(false);
        Thread thread =  new Thread(new Runnable() {
            @Override
            public void run() {
                // Keep-alive loop: ImageReader only delivers a new image when the screen content
                // changes, so if nothing has been sent for a while, re-send the last converted frame.
                long preTS = 0;
                long intervalTS = 100;  // target pacing interval in ms (~10 fps)
                while (!mQuit.get()){
                    long startTS = System.currentTimeMillis();
                    if (preTS==0){
                        preTS = startTS;
                    }
                    if(startTS-mLastSendTSMs>500&&capturerObserver != null){
                        long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
                        VideoFrame.Buffer frameBuffer = new NV21Buffer(outputData, mWidth, mHeight,null);
                        VideoFrame frame = new VideoFrame(frameBuffer, 0, captureTimeNs);
                        if (capturerObserver != null) {
                            capturerObserver.onFrameCaptured(frame);
                        }
                        frame.release();
                    }
                    long diffTS = startTS - preTS;
                    long waitTime = Math.max(intervalTS + intervalTS - diffTS,0);
                    synchronized(mMutex){
                        try {
                            waitTime = Math.max(waitTime,50);
                            mMutex.wait(waitTime);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                            LogManager.w(e.toString());
                        }
                    }
                    //TODO
                    preTS = startTS;
                }
            }
        });
        thread.start();
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void stopCapture(){
        synchronized(stateLock){
            mQuit.set(true);
            synchronized (mMutex){
                mMutex.notify();
            }
            if (mVirtualDisplay != null) {
                mVirtualDisplay.release();
                mVirtualDisplay = null;
            }
            if (mImgReader!=null){
                mImgReader.close();
                mImgReader = null;
            }
            if (mLastSendFrame!=null){
                mLastSendFrame.release();
                mLastSendFrame = null;
            }
        }
    }

    @Override
    public void changeCaptureFormat(int width, int height, int framerate) {
        LogManager.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
        synchronized(stateLock) {
            this.stopCapture();
            this.startCapture(width, height, framerate);
        }
    }

    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void dispose() {
        this.stopCapture();
        if (mMediaProjection != null) {
            mMediaProjection.stop();
        }
        if (mHandlerThread!=null){
            mHandlerThread.quitSafely();
            mHandlerThread = null;
        }
    }

    @Override
    public boolean isScreencast() {
        return true;
    }

    private int getDeviceOrientation() {
        int orientation = 0;
        WindowManager wm = (WindowManager) this.applicationContext.getSystemService(Context.WINDOW_SERVICE);
        switch(wm.getDefaultDisplay().getRotation()) {
            case 0:
            default:
                orientation = 0;
                break;
            case 1:
                orientation = 90;
                break;
            case 2:
                orientation = 180;
                break;
            case 3:
                orientation = 270;
        }
        return orientation;
    }

    // Swap width/height so the capture size matches the current device orientation.
    private void heavySet(int width, int height){
        mOrientation = getDeviceOrientation();
        if (mOrientation == 0||mOrientation==180){
            mWidth = Math.min(width,height);
            mHeight = Math.max(width,height);
        }else {
            mWidth = Math.max(width,height);
            mHeight = Math.min(width,height);
        }
    }

    private Runnable vhrun = new Runnable() {
        @Override
        public void run() {
            changeCaptureFormat(mWidth,mHeight,mFrameRate);
        }
    };
}
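
Once the MediaProjection is available, ScreenRecorder is plugged into WebRTC exactly like the camera capturer in Part 1. A rough sketch, assuming the same mPeerFactory as above (the track id and resolution are just examples):

// Sketch only: feed the custom capturer into the same path as the camera.
VideoSource screenSource = mPeerFactory.createVideoSource(mScreenCapturer); // sets the CapturerObserver via initialize()
VideoTrack screenTrack = mPeerFactory.createVideoTrack("screen_video", screenSource);
mScreenCapturer.startCapture(720, 1280, 10); // heavySet() swaps width/height to match the current orientation
// add screenTrack to the local MediaStream / PeerConnection instead of (or alongside) the camera track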

3. Notes

  • The captured frames are RGBA and must be converted to YUV420SP (NV21) before being handed to WebRTC (a reference Java sketch of the conversion follows this list).
  • Screen capture should run inside a Service (a foreground service on newer Android versions), not inside an Activity.
  • Portrait vs. landscape is handled by swapping the capture width and height (see heavySet()).
  • The effective capture frame rate is dynamic and depends mainly on how the screen content changes: ImageReader only delivers a new image when something on screen changes, which is why the keep-alive thread above re-sends the last frame periodically.
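
SignalingChannel.ConvertABGR8888ToYUV420SP and ConvertABGRPlaneToData are native helpers from this project and are not shown here. As a reference for what the color conversion has to do, below is a plain-Java sketch of an RGBA8888-to-NV21 conversion (BT.601 full-range integer approximation; far slower than the native/libyuv route and meant only as an illustration):

// Sketch only: convert a tightly packed RGBA8888 buffer (no row padding) to NV21 (YUV420SP).
static void rgbaToNv21(byte[] rgba, byte[] nv21, int width, int height) {
    int yIndex = 0;
    int uvIndex = width * height; // interleaved VU plane starts after the Y plane
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            int p = (j * width + i) * 4; // RGBA_8888 memory order: R, G, B, A
            int r = rgba[p] & 0xff;
            int g = rgba[p + 1] & 0xff;
            int b = rgba[p + 2] & 0xff;
            int y = (77 * r + 150 * g + 29 * b) >> 8;
            nv21[yIndex++] = (byte) Math.max(0, Math.min(255, y));
            if ((j & 1) == 0 && (i & 1) == 0) { // one chroma pair per 2x2 block
                int v = ((128 * r - 107 * g - 21 * b) >> 8) + 128;
                int u = ((-43 * r - 85 * g + 128 * b) >> 8) + 128;
                nv21[uvIndex++] = (byte) Math.max(0, Math.min(255, v)); // NV21 stores V first
                nv21[uvIndex++] = (byte) Math.max(0, Math.min(255, u));
            }
        }
    }
}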
