A follow-up on how to use FFmpeg to record a raw (elementary) stream to a file.
An earlier post covered building FFmpeg on Linux.
So how do you actually use the libraries once the build is done?
Drawing on FFmpeg's file-decoding demo, here is a JNI implementation of a decoding (and recording) .so library.
After building the FFmpeg source, create a new project with the directory layout sketched below.
Copy the FFmpeg headers from the build output's include folder into the project's include folder, then copy the generated .so libraries into the prebuilt folder.
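The jni directory then ends up looking roughly like this (an assumed layout, reconstructed from the modules referenced in Android.mk below):

jni/
    Android.mk
    Application.mk
    FFmpeg.c
    com_android_concox_FFmpeg.h
    include/        (FFmpeg headers: libavcodec, libavformat, libavutil, libswscale, ...)
    prebuilt/
        libavcodec-56.so
        libavfilter-5.so
        libavformat-56.so
        libavutil-54.so
        libswresample-1.so
        libswscale-3.so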
Create Android.mk with the following content:
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := avutil-54-prebuilt
LOCAL_SRC_FILES := prebuilt/libavutil-54.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avswresample-1-prebuilt
LOCAL_SRC_FILES := prebuilt/libswresample-1.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := swscale-3-prebuilt
LOCAL_SRC_FILES := prebuilt/libswscale-3.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec-56-prebuilt
LOCAL_SRC_FILES := prebuilt/libavcodec-56.so
include $(PREBUILT_SHARED_LIBRARY)
#include $(CLEAR_VARS)
#LOCAL_MODULE := avdevice-56-prebuilt
#LOCAL_SRC_FILES := prebuilt/libavdevice-56.so
#include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avformat-56-prebuilt
LOCAL_SRC_FILES := prebuilt/libavformat-56.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avfilter-5-prebuilt
LOCAL_SRC_FILES := prebuilt/libavfilter-5.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := libffmpegutil
LOCAL_SRC_FILES := FFmpeg.c
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog -ljnigraphics -lz -landroid -lm -pthread
# avdevice is commented out above, so it must not be listed as a dependency
LOCAL_SHARED_LIBRARIES := avcodec-56-prebuilt avfilter-5-prebuilt avformat-56-prebuilt avutil-54-prebuilt avswresample-1-prebuilt swscale-3-prebuilt
include $(BUILD_SHARED_LIBRARY)
Create Application.mk with the following content:
APP_ABI := armeabi-v7a
APP_PLATFORM := android-9
Create the Java interface class FFmpeg.java:
package com.android.concox;

import android.util.Log;

public class FFmpeg {
    static {
        try {
            System.loadLibrary("avutil-54");
            System.loadLibrary("swresample-1");
            System.loadLibrary("swscale-3");
            System.loadLibrary("avcodec-56");
            System.loadLibrary("avformat-56");
            // libpostproc is not among the prebuilt modules in Android.mk, so it
            // is not loaded here; a failed load would throw and skip the rest.
            System.loadLibrary("avfilter-5");
            System.loadLibrary("ffmpegutil");
        } catch (UnsatisfiedLinkError ule) {
            Log.d("FFMPEG", ule.getMessage());
        }
    }

    public static native int H264DecoderInit();

    public static native int H264DecoderRelease();

    public static native int H264Decode(byte[] in, int insize, int[] framePara, byte[] out);

    public static native int GetFFmpegVersion();

    public static native int InitRecorder(String path, int width, int height);

    public static native int Record(byte[] frame, int length);

    public static native int ReleseRecorder();
}
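For reference, a minimal decode sketch on the Java side (assumed usage: H264DecoderInit() has already been called, each call receives one complete H.264 frame, and the class name and buffer sizes are illustrative):

import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import com.android.concox.FFmpeg;

public class DecodeExample {
    // framePara[0] receives the height and framePara[1] the width (see H264Decode).
    public static Bitmap decodeOne(byte[] h264Frame) {
        int[] framePara = new int[2];
        byte[] rgb565 = new byte[1920 * 1080 * 2];   // worst-case RGB565 output buffer
        int len = FFmpeg.H264Decode(h264Frame, h264Frame.length, framePara, rgb565);
        if (len < 0) return null;                    // decode error
        int height = framePara[0], width = framePara[1];
        if (width <= 0 || height <= 0) return null;  // no picture yet (e.g. SPS/PPS only)
        Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
        bmp.copyPixelsFromBuffer(ByteBuffer.wrap(rgb565, 0, width * height * 2));
        return bmp;
    }
}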
Generate the header file:
From the directory holding the compiled classes (bin/classes in an Eclipse project), run javah -jni com.android.concox.FFmpeg to generate com_android_concox_FFmpeg.h.
Create the implementation file FFmpeg.c:
/* NOTE: the bracketed header names were lost in the original post; the
 * includes below are a reasonable reconstruction of what this file needs. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <math.h>
#include <sys/time.h>
#include <jni.h>
#include <android/log.h>
#include <libavutil/avutil.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include "com_android_concox_FFmpeg.h"
//record
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
#include <libavutil/opt.h>
#include <libavutil/mathematics.h>
#define LOG_TAG "H264Android.c"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#ifdef __cplusplus
extern "C" {
#endif
//Video
struct AVCodecContext *pAVCodecCtx = NULL;
struct AVCodec *pAVCodec;
struct AVPacket mAVPacket;
struct AVFrame *pAVFrame = NULL;
struct SwsContext* pImageConvertCtx = NULL;
struct AVFrame *pFrameYUV = NULL;
//Audio
struct AVCodecContext *pAUCodecCtx = NULL;
struct AVCodec *pAUCodec;
struct AVPacket mAUPacket;
struct AVFrame *pAUFrame = NULL;
int iWidth = 0;
int iHeight = 0;
int *colortab = NULL;
int *u_b_tab = NULL;
int *u_g_tab = NULL;
int *v_g_tab = NULL;
int *v_r_tab = NULL;
//short *tmp_pic=NULL;
unsigned int *rgb_2_pix = NULL;
unsigned int *r_2_pix = NULL;
unsigned int *g_2_pix = NULL;
unsigned int *b_2_pix = NULL;
//record
AVOutputFormat *fmt;
AVFormatContext *oc;
AVStream *audio_st, *video_st;
double audio_pts, video_pts;
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
int16_t *samples;
uint8_t *audio_outbuf;
#define URL_RDONLY 0
#define URL_WRONLY 1
#define URL_RDWR 2
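/* Leftover flags from the old url_fopen() API; the recorder below opens its
 * output with avio_open() and AVIO_FLAG_WRITE instead, so these are unused. */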
void DeleteYUVTab() {
// av_free(tmp_pic);
av_free(colortab);
av_free(rgb_2_pix);
}
unsigned char r_table[256][256];
unsigned char g_table_yu[256][256];
unsigned char g_table_v[256];
unsigned char b_table[256][256];
void yuv420p2rgb565_table_init() {
int i = 0;
int j = 0;
for (i = 0; i < 256; i++) {
for (j = 0; j < 256; j++) {
r_table[i][j] = (i + ((74711 * j) >> 16)) >> 3;
}
}
for (i = 0; i < 256; i++) {
for (j = 0; j < 256; j++) {
g_table_yu[i][j] = i - ((25559 * j) >> 16);
}
}
for (i = 0; i < 256; i++) {
g_table_v[i] = (304087 * i) >> 19;
}
for (i = 0; i < 256; i++) {
for (j = 0; j < 256; j++) {
b_table[i][j] = (i + ((66519 * j) >> 15)) >> 3;
}
}
}
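/* The magic constants above are fixed-point forms of the coefficients listed
 * in the comment of yuv420p2rgb565() below:
 *   74711  = 1.14 * 2^16   (V contribution to R)
 *   25559  = 0.39 * 2^16   (U contribution to G)
 *   304087 = 0.58 * 2^19   (V contribution to G)
 *   66519  = 2.03 * 2^15   (U contribution to B)
 * The trailing >>3 / >>2 shifts reduce the 8-bit results to RGB565's 5/6/5 bits. */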
int yuv420p2rgb565(AVFrame *pAVFrame, unsigned char * dst, int w, int h) {
//R = Y + 1.14V
//G = Y - 0.39U - 0.58V
//B = Y + 2.03U
unsigned char *y_data = pAVFrame->data[0];
unsigned char *u_data = pAVFrame->data[1];
unsigned char *v_data = pAVFrame->data[2];
unsigned char *out_data = dst;
unsigned char *Y = y_data;
unsigned char *U = u_data;
unsigned char *V = v_data;
unsigned int R = 0;
unsigned int G = 0;
unsigned int B = 0;
unsigned short int *out = (unsigned short int *) out_data;
int i = 0;
int size = w * h / 2;
for (i = 0; i < size; i++) {
#if 0
R = (Y[i] + ((74711 * V[i / 4]) >> 16)) >> 3;
G = (Y[i] - ((25559 * U[i / 4]) >> 16) - ((304087 * V[i / 4]) >> 19)) >> 2;
B = (Y[i] + ((66519 * U[i / 4]) >> 15)) >> 3;
#endif
R = r_table[Y[i]][V[i / 2]];
G = (g_table_yu[Y[i]][U[i / 2]] - g_table_v[V[i / 2]]) >> 2;
B = b_table[Y[i]][U[i / 2]];
out[i] = ((R << 11) & 0xF800) + ((G << 5) & 0x07E0) + (B & 0x001F);
// printf("R %u G %u B %u out %d\n", R, G, B, out[i]);
}
return 0;
}
void CreateYUVTab_16() {
int i;
int u, v;
// tmp_pic = (short*)av_malloc(iWidth*iHeight*2); // allocate iWidth * iHeight * 16 bits
colortab = (int *) av_malloc(4 * 256 * sizeof(int));
u_b_tab = &colortab[0 * 256];
u_g_tab = &colortab[1 * 256];
v_g_tab = &colortab[2 * 256];
v_r_tab = &colortab[3 * 256];
for (i = 0; i < 256; i++) {
u = v = (i - 128);
u_b_tab[i] = (int) (1.772 * u);
u_g_tab[i] = (int) (0.34414 * u);
v_g_tab[i] = (int) (0.71414 * v);
v_r_tab[i] = (int) (1.402 * v);
}
rgb_2_pix = (unsigned int *) av_malloc(3 * 768 * sizeof(unsigned int));
r_2_pix = &rgb_2_pix[0 * 768];
g_2_pix = &rgb_2_pix[1 * 768];
b_2_pix = &rgb_2_pix[2 * 768];
for (i = 0; i < 256; i++) {
r_2_pix[i] = 0;
g_2_pix[i] = 0;
b_2_pix[i] = 0;
}
for (i = 0; i < 256; i++) {
r_2_pix[i + 256] = (i & 0xF8) << 8;
g_2_pix[i + 256] = (i & 0xFC) << 3;
b_2_pix[i + 256] = (i) >> 3;
}
for (i = 0; i < 256; i++) {
r_2_pix[i + 512] = 0xF8 << 8;
g_2_pix[i + 512] = 0xFC << 3;
b_2_pix[i + 512] = 0x1F;
}
r_2_pix += 256;
g_2_pix += 256;
b_2_pix += 256;
}
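/* The three 768-entry *_2_pix tables above clamp without branching: after the
 * final +256 pointer offset, indices -256..-1 yield 0 (underflow), 0..255 the
 * packed RGB565 component, and 256..511 the saturated maximum. DisplayYUV_16
 * below then walks the picture in 2x2 blocks, reusing one U/V pair per block
 * and packing two adjacent RGB565 pixels into each 32-bit store. */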
void DisplayYUV_16(unsigned int *pdst1, unsigned char *y, unsigned char *u,
unsigned char *v, int width, int height, int src_ystride,
int src_uvstride, int dst_ystride) {
int i, j;
int r, g, b, rgb;
int yy, ub, ug, vg, vr;
unsigned char* yoff;
unsigned char* uoff;
unsigned char* voff;
unsigned int* pdst = pdst1;
int width2 = width / 2;
int height2 = height / 2;
if (width2 > iWidth / 2) {
width2 = iWidth / 2;
y += (width - iWidth) / 4 * 2;
u += (width - iWidth) / 4;
v += (width - iWidth) / 4;
}
if (height2 > iHeight)
height2 = iHeight;
for (j = 0; j < height2; j++) {
yoff = y + j * 2 * src_ystride;
uoff = u + j * src_uvstride;
voff = v + j * src_uvstride;
for (i = 0; i < width2; i++) {
yy = *(yoff + (i << 1));
ub = u_b_tab[*(uoff + i)];
ug = u_g_tab[*(uoff + i)];
vg = v_g_tab[*(voff + i)];
vr = v_r_tab[*(voff + i)];
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;
rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];
yy = *(yoff + (i << 1) + 1);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;
pdst[(j * dst_ystride + i)] = (rgb)
+ ((r_2_pix[r] + g_2_pix[g] + b_2_pix[b]) << 16);
yy = *(yoff + (i << 1) + src_ystride);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;
rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];
yy = *(yoff + (i << 1) + src_ystride + 1);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;
pdst[((2 * j + 1) * dst_ystride + i * 2) >> 1] = (rgb)
+ ((r_2_pix[r] + g_2_pix[g] + b_2_pix[b]) << 16);
}
}
}
static AVStream * add_video_stream(AVFormatContext *oc, int codec_id, int width,
int height) {
AVCodecContext *c;
AVStream *st;
st = avformat_new_stream(oc, NULL);
if (!st) {
fprintf(stderr, "Could not alloc stream\n");
exit(1);
}
c = st->codec;
c->codec_id = AV_CODEC_ID_H264;
c->codec_type = AVMEDIA_TYPE_VIDEO;
/* put sample parameters */
c->bit_rate = 4000000; //av_Info.quality;
/* resolution must be a multiple of two */
c->width = width;
c->height = height;
/* time base: this is the fundamental unit of time (in seconds) in terms
of which frame timestamps are represented. for fixed-fps content,
timebase should be 1/framerate and timestamp increments should be
identically 1. */
//c->time_base.den = (STREAM_FRAME_RATE)/100;
c->time_base.den = 30;
c->time_base.num = 1; //100;
c->gop_size = 10;
c->me_range = 16;
c->max_qdiff = 4;
c->qmin = 10;
c->qmax = 51;
c->qcompress = 0.6;
c->flags = 0;
c->pix_fmt = PIX_FMT_YUVJ420P;
if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
/* just for testing, we also add B frames */
c->max_b_frames = 2;
}
if (c->codec_id == CODEC_ID_MPEG1VIDEO) {
/* Needed to avoid using macroblocks in which some coeffs overflow.
This does not happen with normal video, it just happens here as
the motion of the chroma plane does not match the luma plane. */
c->mb_decision = 2;
}
// some formats want stream headers to be separate
// if(!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp"))
// c->flags |= CODEC_FLAG_GLOBAL_HEADER;
if (oc->oformat->flags & AVFMT_GLOBALHEADER)
c->flags |= CODEC_FLAG_GLOBAL_HEADER;
return st;
}
/*
* add an audio output stream
*/
static AVStream *add_audio_stream(AVFormatContext *oc, int codec_id,
int BitRate) {
AVCodecContext *c;
AVStream *st;
st = avformat_new_stream(oc, NULL);
if (!st) {
//fprintf(stderr, "Could not alloc stream\n");
LOGD("could not do avformat_new_stream");
return NULL;
}
c = st->codec;
c->codec_id = codec_id;
c->codec_type = AVMEDIA_TYPE_AUDIO;
/* put sample parameters */
c->bit_rate = 64000;
c->sample_rate = 8000;
c->channels = 1;
c->channel_layout = AV_CH_LAYOUT_MONO;
c->sample_fmt = AV_SAMPLE_FMT_S16;
// if (oc->oformat->flags & AVFMT_GLOBALHEADER)
// c->flags |= CODEC_FLAG_GLOBAL_HEADER;
return st;
}
static void open_video(AVFormatContext *oc, AVStream *st) {
AVCodec *codec;
AVCodecContext *c;
c = st->codec;
/* find the video encoder */
#if 1
codec = avcodec_find_encoder(c->codec_id);
LOGD("codec_id:%d\n", c->codec_id);
if (!codec) {
LOGD("! codec ");
return;
}
/* open the codec */
if (avcodec_open2(c, codec, NULL) < 0) {
LOGD("avcodec_open2 fail ");
return;
}
#else
c->coded_frame = malloc(sizeof(AVFrame));
#endif
}
static void open_audio(AVFormatContext *oc, AVStream *st) {
float t, tincr, tincr2;
int audio_outbuf_size;
int audio_input_frame_size;
int AbortSignal = 0;
double Pts_Addon_Duration = 0;
AVDictionary * opt = NULL;
AVCodecContext *c;
AVCodec *codec;
c = st->codec;
/* find the audio encoder */
codec = avcodec_find_encoder(AV_CODEC_ID_PCM_ALAW);
// codec = avcodec_find_encoder(AV_CODEC_ID_PCM_S16BE);
if (!codec) {
//fprintf(stderr, "codec not found\n");
LOGD("could not find audio codec");
return;
}
//av_dict_set(&opt, "ac", "2", 0);
/* open it */
//int ret = avcodec_open2(c, codec, &opt);
int ret = avcodec_open2(c, codec, NULL);
if (ret < 0) {
//fprintf(stderr, "could not open codec\n");
LOGD("could not open audio codec:%d\n", ret);
return;
}
/* init signal generator */
t = 0;
tincr = 2 * M_PI * 110.0 / c->sample_rate;
/* increment frequency by 110 Hz per second */
tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;
audio_outbuf_size = 8000;
audio_outbuf = av_malloc(audio_outbuf_size);
/* ugly hack for PCM codecs (will be removed ASAP with new PCM
support to compute the input frame size in samples */
if (c->frame_size <= 1) {
audio_input_frame_size = audio_outbuf_size / c->channels;
switch (st->codec->codec_id) {
case CODEC_ID_PCM_S16LE:
case CODEC_ID_PCM_S16BE:
case CODEC_ID_PCM_U16LE:
case CODEC_ID_PCM_U16BE:
audio_input_frame_size >>= 1;
break;
default:
break;
}
} else {
audio_input_frame_size = c->frame_size;
}
samples = av_malloc(audio_input_frame_size * 2 * c->channels);
}
static int write_audio_frame(AVFormatContext *oc, AVStream *st, char * buf) {
AVCodecContext *c;
AVPacket pkt;
int audio_size = 320;
static int64_t audioLasttime = -1;
unsigned int audiotime = 0;
int64_t audiotimediff = 0;
int64_t InvCnt = 0;
static int64_t LastCnt = -1;
av_init_packet(&pkt);
c = st->codec;
if (c->coded_frame && c->coded_frame->pts != AV_NOPTS_VALUE) {
pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base,
st->time_base);
}
pkt.flags |= AV_PKT_FLAG_KEY;
pkt.stream_index = st->index;
pkt.data = (uint8_t *) buf; //audio_outbuf;
pkt.size = 320;
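/* 320 bytes of 8 kHz mono A-law is one 40 ms frame; this matches the
 * length == 320 dispatch in Record() below. */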
st->pts.val += 1;
/* write the compressed frame in the media file */
if (av_write_frame(oc, &pkt) != 0) {
// fprintf(stderr, "Error while writing audio frame\n");
LOGD("write audio frame fail");
return -1;
}
LOGD("write video frame success");
return 0;
}
static void close_audio(AVFormatContext *oc, AVStream *st) {
avcodec_close(st->codec);
av_free(samples);
av_free(audio_outbuf);
}
static int write_video_frame(AVFormatContext *oc, AVStream *st, char * buffer,
int size) {
int IsKey = 0;
static int64_t videoLasttime = -1;
unsigned int videotime = 0;
int64_t videotimediff = 0;
int64_t InvCnt = 0;
int ret;
AVCodecContext *c;
static int syncIframe = 0;
static int64_t lastCnt = -1;
c = st->codec;
LOGD("size:%d\n", size);
if (c->coded_frame == NULL) {
LOGD("coded_frame is null");
return 0;
}
IsKey = 1;
c->coded_frame->key_frame = IsKey;
#define SYNC_IDR_FRAME
#ifdef SYNC_IDR_FRAME
/* Code added so that every AVI starts with I frame or IDR frame */
if (IsKey) {
syncIframe = 1;
} else {
if (syncIframe == 0)
return 0;
}
#endif
if (size > 0) {
AVPacket pkt;
av_init_packet(&pkt);
if (c->coded_frame->pts != AV_NOPTS_VALUE)
pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base,
st->time_base);
if (c->coded_frame->key_frame)
pkt.flags |= AV_PKT_FLAG_KEY;
pkt.stream_index = st->index;
pkt.data = (uint8_t *) buffer; //video_outbuf;
pkt.size = size;
st->pts.val += 1;
pkt.pts = pkt.dts = c->coded_frame->pts;
ret = av_write_frame(oc, &pkt);
if (ret == 0) {
LOGD("write video frame success");
}
} else {
ret = 0;
}
if (ret != 0) {
LOGD("av_write_frame error:%d\n", ret);
return ret;
}
return 0;
}
static void close_video(AVFormatContext *oc, AVStream *st) {
avcodec_close(st->codec);
}
/*
* Class: com_android_concox_FFmpeg
* Method: H264DecoderInit
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_H264DecoderInit(
JNIEnv * env, jobject jobj) {
// iWidth = width;
// iHeight = height;
if (pAVCodecCtx != NULL) {
avcodec_close(pAVCodecCtx);
pAVCodecCtx = NULL;
}
if (pAVFrame != NULL) {
av_free(pAVFrame);
pAVFrame = NULL;
}
// Register all formats and codecs
av_register_all();
LOGD("avcodec register success");
//CODEC_ID_PCM_ALAW
pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (pAVCodec == NULL)
return -1;
//init AVCodecContext
pAVCodecCtx = avcodec_alloc_context3(pAVCodec);
if (pAVCodecCtx == NULL)
return -1;
/* we do not send complete frames */
if (pAVCodec->capabilities & CODEC_CAP_TRUNCATED)
pAVCodecCtx->flags |= CODEC_FLAG_TRUNCATED; /* we do not send complete frames */
/* open it */
int ret = avcodec_open2(pAVCodecCtx, pAVCodec, NULL);
if (ret < 0)
return ret;
av_init_packet(&mAVPacket);
pAVFrame = av_frame_alloc();
pFrameYUV = av_frame_alloc();
if (pAVFrame == NULL)
return -1;
LOGD("avcodec context success");
//CreateYUVTab_16();
// yuv420p2rgb565_table_init();
LOGD("create yuv table success");
return 1;
}
/*
* Class: com_android_concox_FFmpeg
* Method: H264DecoderRelease
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_H264DecoderRelease(
JNIEnv * env, jobject jobj) {
if (pAVCodecCtx != NULL) {
avcodec_close(pAVCodecCtx);
pAVCodecCtx = NULL;
}
if (pAVFrame != NULL) {
av_free(pAVFrame);
av_free(pFrameYUV);
pAVFrame = NULL;
pFrameYUV = NULL;
}
DeleteYUVTab();
return 1;
}
/*
* Class: com_android_concox_FFmpeg
* Method: H264Decode
* Signature: ([BI[I[B)I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_H264Decode(JNIEnv* env,
jobject thiz, jbyteArray in, jint inbuf_size, jintArray framePara,
jbyteArray out) {
int i;
jbyte * inbuf = (jbyte*) (*env)->GetByteArrayElements(env, in, 0);
jbyte * Picture = (jbyte*) (*env)->GetByteArrayElements(env, out, 0);
jint * para = (jint*) (*env)->GetIntArrayElements(env, framePara, 0);
av_frame_unref(pAVFrame);
av_frame_unref(pFrameYUV);
mAVPacket.data = (uint8_t *) inbuf;
mAVPacket.size = inbuf_size;
LOGD("mAVPacket.size:%d\n ", mAVPacket.size);
int len = -1, got_picture = 0;
struct timeval tm;
gettimeofday(&tm, NULL);
int start = tm.tv_sec * 1000000 + tm.tv_usec;
len = avcodec_decode_video2(pAVCodecCtx, pAVFrame, &got_picture,
&mAVPacket);
gettimeofday(&tm, NULL);
int end = tm.tv_sec * 1000000 + tm.tv_usec;
LOGD("DecodecostTime:%d\n", end - start);
iWidth = pAVCodecCtx->width;
iHeight = pAVCodecCtx->height;
para[0] = iHeight; /* keep para itself unmodified: the same pointer is released below */
para[1] = iWidth;
LOGD("iWidth:%d\n ", iWidth);
LOGD("iHeight:%d\n ", iHeight);
LOGD("len:%d\n", len);
if (len < 0) {
LOGD("len=-1,decode error");
return len;
}
if (got_picture > 0) {
LOGD("GOT PICTURE");
gettimeofday(&tm, NULL);
start = tm.tv_sec * 1000000 + tm.tv_usec;
int linesize[4] = { 2 * iWidth, 0, 0, 0 };
/* sws_getCachedContext reuses the previous context when the parameters
 match, instead of leaking a fresh context on every frame */
pImageConvertCtx = sws_getCachedContext(pImageConvertCtx,
pAVCodecCtx->width, pAVCodecCtx->height, PIX_FMT_YUV420P,
pAVCodecCtx->width, pAVCodecCtx->height, PIX_FMT_RGB565,
SWS_BICUBIC, 0, 0, 0);
LOGD("pAVFrame->data:%p\n", pAVFrame->data);
LOGD("pAVFrame->linesize:%d\n", pAVFrame->linesize[0]);
pFrameYUV->data[0] = (uint8_t *) Picture;
pFrameYUV->data[1] = NULL;
pFrameYUV->data[2] = NULL;
pFrameYUV->data[3] = NULL;
sws_scale(pImageConvertCtx, (const uint8_t* const *) pAVFrame->data,
pAVFrame->linesize, 0, pAVCodecCtx->height, pFrameYUV->data,
linesize);
// yuv420p2rgb565(pAVFrame, Picture, iWidth, iHeight);
gettimeofday(&tm, NULL);
end = tm.tv_sec * 1000000 + tm.tv_usec;
LOGD("TranscostTime:%d\n", end - start);
} else
LOGD("GOT PICTURE fail");
(*env)->ReleaseByteArrayElements(env, in, inbuf, 0);
(*env)->ReleaseByteArrayElements(env, out, Picture, 0);
(*env)->ReleaseIntArrayElements(env, framePara, para, 0);
return len;
}
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_GetFFmpegVersion(
JNIEnv * env, jobject jobj) {
return avcodec_version();
}
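/* avcodec_version() packs the libavcodec version as (major<<16)|(minor<<8)|micro,
 * so the Java side can recover the parts with (v >> 16), (v >> 8) & 0xFF and v & 0xFF. */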
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_InitRecorder(JNIEnv * env,
jclass object, jstring filename, jint width, jint height) {
const char* str = (*env)->GetStringUTFChars(env, filename, NULL);
LOGD("InitRecorder");
av_register_all();
fmt = av_guess_format(NULL, str, NULL);
if (!fmt) {
//printf("Could not deduce output format from file extension: using MPEG.\n");
fmt = av_guess_format("mpeg", NULL, NULL);
}
if (!fmt) {
return -1;
}
/* allocate the output media context */
oc = avformat_alloc_context();
if (!oc) {
//fprintf(stderr, "Memory error\n");
return -1;
}
oc->oformat = fmt;
strncpy(oc->filename, str, sizeof(oc->filename));
/* add the audio and video streams using the default format codecs
and initialize the codecs */
video_st = NULL;
audio_st = NULL;
fmt->video_codec = AV_CODEC_ID_H264;
fmt->audio_codec = AV_CODEC_ID_PCM_ALAW;
if (fmt->video_codec != CODEC_ID_NONE) {
video_st = add_video_stream(oc, fmt->video_codec, width, height);
LOGD("add_video_stream");
}
if (fmt->audio_codec != CODEC_ID_NONE) {
audio_st = add_audio_stream(oc, fmt->audio_codec, 64000);
LOGD("add_audio_stream");
}
av_dump_format(oc, 0, str, 1);
LOGD("av_dump_format");
/* now that all the parameters are set, we can open the audio and
video codecs and allocate the necessary encode buffers */
if (video_st) {
open_video(oc, video_st);
LOGD("open_video");
}
if (audio_st) {
open_audio(oc, audio_st);
LOGD("open_audio");
}
/* open the output file, if needed */
if (!(fmt->flags & AVFMT_NOFILE)) {
//modified by caif
//if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {.
if (avio_open(&oc->pb, str, AVIO_FLAG_WRITE) < 0) {
// fprintf(stderr, "Could not open '%s'\n", filename);
LOGD("could not open file: %s\n", str);
return -1;
}
}
/* write the stream header, if any */
avformat_write_header(oc, NULL);
LOGD("avformat_write_header success");
(*env)->ReleaseStringUTFChars(env, filename, str);
return 0;
}
/*
* Class: com_android_concox_FFmpeg
* Method: Record
* Signature: ([BI)I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_Record(JNIEnv * env,
jclass object, jbyteArray frame, jint length) {
jbyte * frameBuf = (jbyte*) (*env)->GetByteArrayElements(env, frame, 0);
int audio_ret = 0;
int video_ret = 0;
/* compute current audio and video time */
// if (audio_st)
//
// audio_pts = (double) audio_st->pts.val * audio_st->time_base.num
// / audio_st->time_base.den;
//
// else
// audio_pts = 0.0;
//
// LOGD("audio_pts:%d", audio_pts);
//
// if (video_st)
// video_pts = (double) video_st->pts.val * video_st->time_base.num
// / video_st->time_base.den;
// else
// video_pts = 0.0;
//
// LOGD("video_pts:%d", video_pts);
video_ret = -1;
audio_ret = -1;
if (length == 320) {
LOGD("WRITE AUDIO");
audio_ret = write_audio_frame(oc, audio_st, frameBuf);
} else {
LOGD("WRITE VIDEO");
video_ret = write_video_frame(oc, video_st, frameBuf, length);
}
(*env)->ReleaseByteArrayElements(env, frame, frameBuf, 0);
return (length == 320) ? audio_ret : video_ret;
}
/*
* Class: com_android_concox_FFmpeg
* Method: ReleseRecorder
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_ReleseRecorder(
JNIEnv * env, jclass object) {
av_write_trailer(oc);
LOGD("av_write_trailer");
/* close each codec */
if (video_st)
close_video(oc, video_st);
if (audio_st)
close_audio(oc, audio_st);
/* free the streams */
int i;
for (i = 0; i < oc->nb_streams; i++) {
av_freep(&oc->streams[i]->codec);
av_freep(&oc->streams[i]);
}
if (!(fmt->flags & AVFMT_NOFILE)) {
/* close the output file */
avio_close(oc->pb);
}
/* free the stream */
av_free(oc);
return 0;
}
#ifdef __cplusplus
}
#endif
With the NDK builder configured in Eclipse, building the project produces the libffmpegutil library.
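To drive the recorder from Java, a usage sketch along these lines should work (the output path, resolution and frame sources are illustrative; Record() treats an exactly-320-byte buffer as one A-law audio frame and anything else as an H.264 video frame):

import com.android.concox.FFmpeg;

public class RecordExample {
    public static void record(byte[][] h264Frames, byte[][] alawFrames) {
        // The container is guessed from the file extension by av_guess_format().
        if (FFmpeg.InitRecorder("/sdcard/record.avi", 1280, 720) < 0)
            return;
        // A real recorder would interleave audio and video in capture order.
        for (byte[] v : h264Frames)
            FFmpeg.Record(v, v.length);
        for (byte[] a : alawFrames)
            FFmpeg.Record(a, a.length);
        FFmpeg.ReleseRecorder(); // writes the trailer and closes the file
    }
}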
For reference, the generated com_android_concox_FFmpeg.h:
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class com_android_concox_FFmpeg */
#ifndef _Included_com_android_concox_FFmpeg
#define _Included_com_android_concox_FFmpeg
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: com_android_concox_FFmpeg
* Method: H264DecoderInit
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_H264DecoderInit
(JNIEnv *, jclass);
/*
* Class: com_android_concox_FFmpeg
* Method: H264DecoderRelease
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_H264DecoderRelease
(JNIEnv *, jclass);
/*
* Class: com_android_concox_FFmpeg
* Method: H264Decode
* Signature: ([BI[I[B)I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_H264Decode
(JNIEnv *, jclass, jbyteArray, jint, jintArray, jbyteArray);
/*
* Class: com_android_concox_FFmpeg
* Method: GetFFmpegVersion
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_GetFFmpegVersion
(JNIEnv *, jclass);
/*
* Class: com_android_concox_FFmpeg
* Method: InitRecorder
* Signature: (Ljava/lang/String;II)I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_InitRecorder
(JNIEnv *, jclass, jstring, jint, jint);
/*
* Class: com_android_concox_FFmpeg
* Method: Record
* Signature: ([BI)I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_Record
(JNIEnv *, jclass, jbyteArray, jint);
/*
* Class: com_android_concox_FFmpeg
* Method: ReleseRecorder
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_com_android_concox_FFmpeg_ReleseRecorder
(JNIEnv *, jclass);
#ifdef __cplusplus
}
#endif
#endif