android webrtc 视频流源码获取帧图像VideoFrame转bitmap 作为图像识别

由于需要使用 OpenCV 等项目对 WebRTC 中的画面进行图像识别,
因此需要从 WebRTC 摄像头采集的视频流中抓取帧图像。

该方法是在实现了 VideoSink 接口的 EglRenderer 类的 onFrame 回调中,调用 saveImgBitmap(frame) 方法来获取图像。具体代码如下:
// VideoSink interface.
@Override
public void onFrame(VideoFrame frame){

 //将org.webrtc.VideoFrame转bitmap并保存
 saveImgBitmap(frame)

 synchronized (statisticsLock) {
    ++framesReceived;
  }
  final boolean dropOldFrame;
  synchronized (handlerLock) {
    if (renderThreadHandler == null) {
      logD("Dropping frame - Not initialized or already released.");
      return;
    }
    synchronized (frameLock) {
      dropOldFrame = (pendingFrame != null);
      if (dropOldFrame) {
        pendingFrame.release();
      }
      pendingFrame = frame;
      pendingFrame.retain();
      renderThreadHandler.post(this ::renderFrameOnRenderThread);
    }
  }
  if (dropOldFrame) {
    synchronized (statisticsLock) {
      ++framesDropped;
    }
  }
 }
}

 

class EglRenderer implements VideoSink { 

 ......

  // Transform applied when drawing a frame into the capture framebuffer
  // (mirroring and the vertical flip required for Bitmap readback).
  private final Matrix drawMatrix = new Matrix();
  // Used for bitmap capturing.
  private final GlTextureFrameBuffer bitmapTextureFramebuffer =
      new GlTextureFrameBuffer(GLES20.GL_RGBA);

 ......

public void saveImgBitmap(VideoFrame frame){
    drawMatrix.reset();
    drawMatrix.preTranslate(0.5f, 0.5f);
    drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
    drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
    drawMatrix.preTranslate(-0.5f, -0.5f);



    final int scaledWidth = (int) (1 * frame.getRotatedWidth());
    final int scaledHeight = (int) (1 * frame.getRotatedHeight());


    bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);

    GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */,
            0 /* viewportY */, scaledWidth, scaledHeight);

    final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
    GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
    GLES20.glReadPixels(
            0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");

    final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(bitmapBuffer);


    try {
      OutputStream outputStream=new FileOutputStream(Environment.getExternalStorageDirectory().getAbsolutePath()+"/"+"atest/"+System.currentTimeMillis()+".jpg");
      bitmap.compress(Bitmap.CompressFormat.JPEG,100,outputStream);
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    }
    
  }

}

 

你可能感兴趣的:(android,移动开发,java)