android opencv 调用硬编码

目录

opencv部分

编码部分

获取类函数签名:


opencv部分

#include <jni.h>
#include <android/bitmap.h>
#include <opencv2/opencv.hpp>

extern "C" {

/**
 * Bridges an OpenCV Mat (passed from Java as its native address) back into
 * the Java layer: converts BGR -> RGBA, wraps the pixel data in a direct
 * ByteBuffer (no copy), and invokes the instance's encode(ByteBuffer) method.
 *
 * Note: the ByteBuffer aliases rgbaMat's pixel storage, which is destroyed
 * when this function returns. The Java-side encode() must therefore consume
 * (copy) the bytes synchronously and must not retain the buffer.
 */
JNIEXPORT void JNICALL Java_com_example_myapp_MyActivity_encode(JNIEnv *env, jobject instance, jlong matAddr) {
    // Reinterpret the jlong as a pointer to the caller-owned Mat.
    cv::Mat &mat = *(cv::Mat *) matAddr;

    // Convert to RGBA so the Java side receives 4-byte-per-pixel data.
    // (The original code also filled an AndroidBitmapInfo struct here, but it
    // was never used anywhere — removed as dead code.)
    cv::Mat rgbaMat;
    cv::cvtColor(mat, rgbaMat, cv::COLOR_BGR2RGBA);

    // Wrap the RGBA pixel block in a direct ByteBuffer without copying.
    jobject buffer = env->NewDirectByteBuffer(
            rgbaMat.data, (jlong) (rgbaMat.total() * rgbaMat.elemSize()));
    if (buffer == nullptr) {
        return; // OutOfMemoryError is already pending in the JVM
    }

    // Look up and invoke the Java-side encode(ByteBuffer) method.
    jclass clazz = env->GetObjectClass(instance);
    jmethodID methodId = env->GetMethodID(clazz, "encode", "(Ljava/nio/ByteBuffer;)V");
    if (methodId == nullptr) {
        env->DeleteLocalRef(buffer);
        env->DeleteLocalRef(clazz);
        return; // NoSuchMethodError is already pending
    }
    env->CallVoidMethod(instance, methodId, buffer);

    // Release local references — important if this is called once per frame.
    env->DeleteLocalRef(buffer);
    env->DeleteLocalRef(clazz);
}

}

编码部分

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Thin wrapper around a MediaCodec H.264 (AVC) encoder.
 *
 * <p>Raw frames are submitted through {@link #encode(ByteBuffer)}; encoded
 * output buffers are dequeued and immediately released (this sample does not
 * mux or persist the encoded bitstream).
 */
public class VideoEncoder {
    private static final String MIME_TYPE = "video/avc";
    private static final int BIT_RATE = 500_000;
    private static final int FRAME_RATE = 30;
    private static final int I_FRAME_INTERVAL_SEC = 5;

    private MediaCodec codec;
    private MediaFormat format;
    // Monotonic frame counter used to derive presentation timestamps.
    private long frameIndex;

    /**
     * Creates and starts an AVC encoder for the given frame size.
     *
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @throws IllegalStateException if the encoder cannot be created
     */
    public VideoEncoder(int width, int height) {
        try {
            codec = MediaCodec.createEncoderByType(MIME_TYPE);
            format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
            format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL_SEC);
            codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            codec.start();
        } catch (IOException e) {
            // Don't swallow: a half-constructed encoder would NPE on first use.
            throw new IllegalStateException("Failed to create AVC encoder", e);
        }
    }

    /**
     * Queues one raw frame for encoding and drains any pending output.
     *
     * @param buffer frame data; all remaining bytes are consumed
     */
    public void encode(ByteBuffer buffer) {
        int inputBufferIndex = codec.dequeueInputBuffer(-1);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferIndex);
            inputBuffer.clear(); // reset position/limit left over from a previous frame
            int size = buffer.remaining(); // bytes actually provided, not capacity()
            inputBuffer.put(buffer);
            // MediaCodec expects the PTS in MICROseconds and monotonically
            // increasing; the original wall-clock System.currentTimeMillis()
            // had both the wrong unit and no frame-rate relation.
            long ptsUs = frameIndex++ * 1_000_000L / FRAME_RATE;
            codec.queueInputBuffer(inputBufferIndex, 0, size, ptsUs, 0);
        }

        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = codec.dequeueOutputBuffer(bufferInfo, 0);
        while (outputBufferIndex >= 0) {
            // Encoded output is discarded in this sample; just recycle the buffer.
            codec.releaseOutputBuffer(outputBufferIndex, false);
            outputBufferIndex = codec.dequeueOutputBuffer(bufferInfo, 0);
        }
    }

    /** Stops and releases the underlying codec. Call exactly once when done. */
    public void release() {
        codec.stop();
        codec.release();
    }
}

获取类函数签名:

javap -s com.example.myapp.MyActivity

c++ 方案2

#include <jni.h>
#include <string>
#include <opencv2/opencv.hpp>

extern "C" {

/**
 * Encodes a list of image files into a video by driving the Java-side
 * MyEncoder class: each image is read with OpenCV, scaled to the target
 * resolution, converted to I420 (planar YUV420), and fed to
 * MyEncoder.encodeFrame(byte[], long) with a frame-rate-derived timestamp.
 */
JNIEXPORT void JNICALL Java_com_example_myapp_MyActivity_encodeImages(JNIEnv *env, jobject instance, jobjectArray imagePaths, jstring outputPath, jint width, jint height, jint frameRate, jint bitRate) {
    // Resolve the Java-side encoder class and its methods up front.
    // NOTE: the JNI name of a constructor is "<init>" — the original code
    // passed an empty string, which can never resolve.
    jclass cls = env->FindClass("com/example/myapp/MyEncoder");
    if (cls == nullptr) return; // NoClassDefFoundError pending
    jmethodID constructor = env->GetMethodID(cls, "<init>", "(Ljava/lang/String;IIII)V");
    jmethodID encodeFrame = env->GetMethodID(cls, "encodeFrame", "([BJ)V");
    jmethodID release = env->GetMethodID(cls, "release", "()V");
    if (constructor == nullptr || encodeFrame == nullptr || release == nullptr) {
        return; // NoSuchMethodError pending
    }

    jobject encoder = env->NewObject(cls, constructor, outputPath, width, height, frameRate, bitRate);
    if (encoder == nullptr || env->ExceptionCheck()) {
        return; // constructor threw (e.g. IOException from MediaMuxer)
    }

    // I420 frame size: full-resolution Y plane plus quarter-resolution U and V.
    const jsize yuvSize = (jsize) (width * height * 3 / 2);

    jsize numImages = env->GetArrayLength(imagePaths);
    for (jsize i = 0; i < numImages; i++) {
        jstring imagePath = (jstring) env->GetObjectArrayElement(imagePaths, i);
        const char *imagePathCStr = env->GetStringUTFChars(imagePath, nullptr);

        // Read BGR pixels from disk; release the path string immediately.
        cv::Mat image = cv::imread(imagePathCStr, cv::IMREAD_COLOR);
        env->ReleaseStringUTFChars(imagePath, imagePathCStr);
        env->DeleteLocalRef(imagePath);
        if (image.empty()) {
            continue; // unreadable file: skip rather than feed garbage
        }

        // Scale to the encoder's resolution, then convert BGR -> I420.
        cv::Mat scaled;
        cv::resize(image, scaled, cv::Size(width, height));
        cv::Mat yuvMat;
        cv::cvtColor(scaled, yuvMat, cv::COLOR_BGR2YUV_I420);

        // Copy the I420 bytes into a Java byte[] for encodeFrame.
        jbyteArray yuv = env->NewByteArray(yuvSize);
        env->SetByteArrayRegion(yuv, 0, yuvSize, (const jbyte *) yuvMat.data);

        // Presentation timestamp in microseconds for frame i.
        jlong pts = (jlong) i * 1000000 / frameRate;
        env->CallVoidMethod(encoder, encodeFrame, yuv, pts);
        env->DeleteLocalRef(yuv); // avoid local-ref table overflow on long lists
        if (env->ExceptionCheck()) {
            break; // encodeFrame threw; stop encoding, still release below
        }
    }

    // Finalize the output file.
    env->CallVoidMethod(encoder, release);
}

}

java方案2:

/**
 * Encodes raw I420 (YUV420 planar) frames to H.264 and muxes them into an
 * MP4 file via MediaMuxer.
 *
 * <p>Usage: construct, call {@link #encodeFrame(byte[], long)} once per frame
 * with a microsecond presentation timestamp, then {@link #release()} to flush
 * the encoder and finalize the file.
 */
public class MyEncoder {
    private MediaCodec codec;
    private MediaFormat format;
    private MediaMuxer muxer;
    private int trackIndex;
    private boolean muxerStarted;

    /**
     * Creates and starts the AVC encoder and the MP4 muxer.
     *
     * @throws IOException if the codec or muxer cannot be created
     */
    public MyEncoder(String outputPath, int width, int height, int frameRate, int bitRate) throws IOException {
        format = MediaFormat.createVideoFormat("video/avc", width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

        codec = MediaCodec.createEncoderByType("video/avc");
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();

        muxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        muxerStarted = false;
    }

    /**
     * Submits one raw frame to the encoder and drains available output.
     *
     * @param yuv frame data in the configured YUV420 layout
     * @param pts presentation timestamp in microseconds
     */
    public void encodeFrame(byte[] yuv, long pts) throws IOException {
        int inputBufferIndex = codec.dequeueInputBuffer(10000);
        if (inputBufferIndex >= 0) {
            // getInputBuffer (API 21+) replaces the deprecated getInputBuffers()[i].
            ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferIndex);
            inputBuffer.clear();
            inputBuffer.put(yuv);
            codec.queueInputBuffer(inputBufferIndex, 0, yuv.length, pts, 0);
        }
        drainOutput();
    }

    // Pulls all currently-available encoded buffers and writes them to the muxer.
    private void drainOutput() {
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = codec.dequeueOutputBuffer(bufferInfo, 10000);
        while (outputBufferIndex >= 0
                || outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // The encoder publishes its final format (including csd-0/csd-1)
                // here; this is the correct point to add the track and start
                // muxing. The original loop exited on this status (-2) and
                // started the muxer from the first payload buffer instead.
                if (!muxerStarted) {
                    trackIndex = muxer.addTrack(codec.getOutputFormat());
                    muxer.start();
                    muxerStarted = true;
                }
            } else {
                ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferIndex);
                // Codec-config buffers (SPS/PPS) are already carried in the
                // track format; writing them as samples corrupts the stream.
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0
                        && bufferInfo.size > 0 && muxerStarted) {
                    muxer.writeSampleData(trackIndex, outputBuffer, bufferInfo);
                }
                codec.releaseOutputBuffer(outputBufferIndex, false);
            }
            outputBufferIndex = codec.dequeueOutputBuffer(bufferInfo, 10000);
        }
    }

    /** Signals end-of-stream, drains remaining frames, and releases everything. */
    public void release() {
        if (codec != null) {
            try {
                // Without an EOS the encoder's buffered tail frames are dropped.
                int inputBufferIndex = codec.dequeueInputBuffer(10000);
                if (inputBufferIndex >= 0) {
                    codec.queueInputBuffer(inputBufferIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                }
                drainOutput();
            } finally {
                codec.stop();
                codec.release();
                codec = null;
            }
        }
        if (muxer != null) {
            if (muxerStarted) {
                muxer.stop();
            }
            muxer.release();
            muxer = null;
        }
    }
}

你可能感兴趣的:(android开发,android,opencv,人工智能)