Mac系统下ffmpeg+h264+flv编码的android录制屏幕实现2

接上一篇。

activity_flv.xml


<!-- NOTE: the original XML tags were stripped during extraction; reconstructed
     from the surviving attributes (root layout > horizontal LinearLayout >
     Button + SurfaceView). -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".FlvActivity">

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:orientation="horizontal">

        <Button
            android:id="@+id/take_button"
            android:layout_width="wrap_content"
            android:layout_height="match_parent"
            android:text="open"/>

        <SurfaceView
            android:id="@+id/surfaceView1"
            android:layout_width="0dp"
            android:layout_height="match_parent"
            android:layout_weight="1"/>

    </LinearLayout>
</LinearLayout>


最后偷懒贴个图build.gradle

Mac系统下ffmpeg+h264+flv编码的android录制屏幕实现2_第1张图片
build.gradle


忘记上jni文件了,直接走起


/* NOTE(review): the header names inside <> were lost during extraction;
 * <stdio.h>, <jni.h> and <android/log.h> reconstructed from usage
 * (FILE/fopen/printf, JNIEXPORT/jint, __android_log_print) — confirm. */
#include <stdio.h>

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/time.h"

#ifdef ANDROID
#include <jni.h>
#include <android/log.h>
#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO, "(=_=)", format, ##__VA_ARGS__)
#else
#define LOGE(format, ...)  printf("(>_<) "format"\n", ##__VA_ARGS__)
#define LOGI(format, ...)  printf("(^_^) "format"\n", ##__VA_ARGS__)
#endif

/* Encoder/muxer state shared by the JNI entry points below. */
AVFormatContext *ofmt_ctx;   /* FLV output muxer context                    */
AVStream *video_st;          /* the single H.264 video stream               */
AVCodecContext *pCodecCtx;   /* H.264 encoder context                       */
AVCodec *pCodec;             /* H.264 encoder                               */
AVPacket enc_pkt;            /* packet reused for encoded output            */
AVFrame *pFrameYUV;          /* YUV420P frame fed to the encoder            */
int framecnt = 0;            /* frames encoded so far (drives PTS)          */
int yuv_width;               /* input frame width in pixels                 */
int yuv_height;              /* input frame height in pixels                */
int y_length;                /* Y plane size in bytes (w*h)                 */
int uv_length;               /* one chroma plane size in bytes (w*h/4)      */
int64_t start_time;          /* av_gettime() when muxing started, in us     */

//Output FFmpeg's av_log()

/* av_log() callback: append FFmpeg's log output to a file on external
 * storage so it can be inspected on-device.  Opens/flushes/closes on every
 * call — slow, but the log survives if the process dies mid-run.
 * Silently drops messages if the file cannot be opened. */
void custom_log(void *ptr, int level, const char *fmt, va_list vl) {
    FILE *fp = fopen("/storage/emulated/0/av_log.txt", "a+");
    if (fp) {
        vfprintf(fp, fmt, vl);
        fflush(fp);
        fclose(fp);
    }
}

JNIEXPORTjintJNICALLJava_csupport_lyjq_com_csupport_FlvActivity_initial

(JNIEnv*env,jobjectobj,jintwidth,jintheight)

{

const char* out_path ="/storage/emulated/0/testffmpeg.flv";

yuv_width=width;

yuv_height=height;

y_length=width*height;

uv_length=width*height/4;

//FFmpeg av_log() callback

av_log_set_callback(custom_log);

av_register_all();

//output initialize

avformat_alloc_output_context2(&ofmt_ctx,NULL,"flv", out_path);

//output encoder initialize

pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);

if(!pCodec){

LOGE("Can not find encoder!\n");

return-1;

}

pCodecCtx = avcodec_alloc_context3(pCodec);

pCodecCtx->pix_fmt=AV_PIX_FMT_YUV420P;

pCodecCtx->width= width;

pCodecCtx->height= height;

pCodecCtx->time_base.num=1;

pCodecCtx->time_base.den=30;

pCodecCtx->bit_rate=800000;

pCodecCtx->gop_size=300;

/* Some formats want stream headers to be separate. */

if(ofmt_ctx->oformat->flags&AVFMT_GLOBALHEADER)

pCodecCtx->flags|=CODEC_FLAG_GLOBAL_HEADER;

//H264 codec param

//pCodecCtx->me_range = 16;

//pCodecCtx->max_qdiff = 4;

//pCodecCtx->qcompress = 0.6;

pCodecCtx->qmin=10;

pCodecCtx->qmax=51;

//Optional Param

pCodecCtx->max_b_frames=3;

// Set H264 preset and tune

AVDictionary*param =0;

av_dict_set(¶m,"preset","ultrafast",0);

av_dict_set(¶m,"tune","zerolatency",0);

if(avcodec_open2(pCodecCtx, pCodec, ¶m) <0){

LOGE("Failed to open encoder!\n");

return-1;

}

//Add a new stream to output,should be called by the user before avformat_write_header() for muxing

video_st = avformat_new_stream(ofmt_ctx, pCodec);

if(video_st ==NULL){

return-1;

}

video_st->time_base.num=1;

video_st->time_base.den=30;

video_st->codec= pCodecCtx;

//Open output URL,set before avformat_write_header() for muxing

if(avio_open(&ofmt_ctx->pb, out_path,AVIO_FLAG_READ_WRITE) <0){

LOGE("Failed to open output file!\n");

return-1;

}

//Write File Header

avformat_write_header(ofmt_ctx,NULL);

start_time = av_gettime();

return0;

}

JNIEXPORTjintJNICALLJava_csupport_lyjq_com_csupport_FlvActivity_encode

(JNIEnv*env,jobjectobj,jbyteArrayyuv)

{

intret;

intenc_got_frame=0;

inti=0;

pFrameYUV = av_frame_alloc();

uint8_t*out_buffer = (uint8_t*)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));

avpicture_fill((AVPicture*)pFrameYUV, out_buffer,AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

//��׿����ͷ����ΪNV21��ʽ���˴�����ת��ΪYUV420P��ʽ

jbyte* in= (jbyte*)(*env)->GetByteArrayElements(env,yuv,0);

memcpy(pFrameYUV->data[0],in,y_length);

for(i=0;i

{

*(pFrameYUV->data[2]+i)=*(in+y_length+i*2);

*(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);

}

pFrameYUV->format=AV_PIX_FMT_YUV420P;

pFrameYUV->width= yuv_width;

pFrameYUV->height= yuv_height;

enc_pkt.data=NULL;

enc_pkt.size=0;

av_init_packet(&enc_pkt);

ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);

av_frame_free(&pFrameYUV);

if(enc_got_frame ==1){

LOGI("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);

framecnt++;

enc_pkt.stream_index= video_st->index;

//Write PTS

AVRationaltime_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };

AVRationalr_framerate1 = {60,2};//{ 50, 2 };

AVRationaltime_base_q = {1,AV_TIME_BASE};

//Duration between 2 frames (us)

int64_tcalc_duration = (double)(AV_TIME_BASE)*(1/ av_q2d(r_framerate1));//�ڲ�ʱ���

//Parameters

//enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));

enc_pkt.pts= av_rescale_q(framecnt*calc_duration, time_base_q, time_base);

enc_pkt.dts= enc_pkt.pts;

enc_pkt.duration= av_rescale_q(calc_duration, time_base_q, time_base);//(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));

enc_pkt.pos= -1;

//Delay

int64_tpts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);

int64_tnow_time = av_gettime() - start_time;

if(pts_time > now_time)

av_usleep(pts_time - now_time);

ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);

av_free_packet(&enc_pkt);

}

return0;

}

JNIEXPORTjintJNICALLJava_csupport_lyjq_com_csupport_FlvActivity_flush

(JNIEnv*env,jobjectobj)

{

intret;

intgot_frame;

AVPacketenc_pkt;

if(!(ofmt_ctx->streams[0]->codec->codec->capabilities&

CODEC_CAP_DELAY))

return0;

while(1) {

enc_pkt.data=NULL;

enc_pkt.size=0;

av_init_packet(&enc_pkt);

ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,

NULL, &got_frame);

if(ret <0)

break;

if(!got_frame){

ret =0;

break;

}

LOGI("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);

//Write PTS

AVRationaltime_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };

AVRationalr_framerate1 = {60,2};

AVRationaltime_base_q = {1,AV_TIME_BASE};

//Duration between 2 frames (us)

int64_tcalc_duration = (double)(AV_TIME_BASE)*(1/ av_q2d(r_framerate1));//�ڲ�ʱ���

//Parameters

enc_pkt.pts= av_rescale_q(framecnt*calc_duration, time_base_q, time_base);

enc_pkt.dts= enc_pkt.pts;

enc_pkt.duration= av_rescale_q(calc_duration, time_base_q, time_base);

//ת��PTS/DTS��Convert PTS/DTS��

enc_pkt.pos= -1;

framecnt++;

ofmt_ctx->duration= enc_pkt.duration* framecnt;

/* mux encoded frame */

ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);

if(ret <0)

break;

}

//Write file trailer

av_write_trailer(ofmt_ctx);

return0;

}

JNIEXPORTjintJNICALLJava_csupport_lyjq_com_csupport_FlvActivity_close

(JNIEnv*env,jobjectobj)

{

if(video_st)

avcodec_close(video_st->codec);

avio_close(ofmt_ctx->pb);

avformat_free_context(ofmt_ctx);

return0;

}




效果图,生成的flv文件在sd卡根目录下


Mac系统下ffmpeg+h264+flv编码的android录制屏幕实现2_第2张图片

Happy ending

你可能感兴趣的:(Mac系统下ffmpeg+h264+flv编码的android录制屏幕实现2)