只是调通,但是包括显示效果和BUG都还不少。
还未做优化,刚测试,播出来的太卡,而且大概只能播十秒钟左右(不知道这个是不是我RTMP服务器设置了缓存限制)。怕自己忘了思路,所以赶紧总结一下,为明天做准备。
没有设置AVPacket的时间戳,可能是影响的原因。
在昨天的基础上,用av_write_frame();把之前得到的h264压缩数据发出去。
虽然好不容易在PC端可以看到手机的图像,但是现在还高兴不出来。只能说程序调通了。
Android出来的data[]数据,直接调用的底层发送,但是底层是耗时操作,一直调用底层的函数,并发送出去,问题在于控制线程安全。
大概优化的思路有:1、设置AVPacket的时间戳。2、控制线程。3、看看AVCodec有没有什么需要优化的地方。4、今天弄的发送RTMP的AVFormatContext的设置。5、暂时想到这么多。
jni层
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <math.h>
#include <pthread.h>
#include <unistd.h>
#include <android/log.h>
#include "ffmpeg/libavformat/avformat.h"
#include "ffmpeg/libavdevice/avdevice.h"
#include "ffmpeg/libavfilter/avfilter.h"
#include "ffmpeg/libswscale/swscale.h"
#include "ffmpeg/libswresample/swresample.h"
#include "ffmpeg/libavutil/avutil.h"
#include "ffmpeg/libavutil/opt.h"
#include "ffmpeg/libavutil/imgutils.h"
#include "ffmpeg/libavutil/log.h"
#define TEST_H264 1
/* ---- Encoder / muxer state shared by the three JNI entry points below.
 * NOTE(review): none of this is synchronized; concurrent JNI calls
 * (e.g. Encoding from the camera callback thread while CloseVideo runs)
 * will race on every one of these. */
AVCodec *pCodec;                  // H.264 encoder (found in getVersion)
AVCodecContext *pCodecCtx = NULL; // encoder context
int i, ret, got_output;           // scratch: loop index, return code, "encoder produced a packet" flag
FILE *fp_out;                     // local raw-.h264 dump file
AVFrame *pFrame;                  // per-call raw frame (allocated/freed in Encoding)
AVPacket pkt;                     // reusable encoded-packet holder
int y_size;                       // width * height = bytes in one luma plane
int framecnt = 0;                 // count of successfully encoded frames
char filename_out[] = "/storage/emulated/0/yourname.h264";
//char output_str[]="rtmp://192.168.1.101:1935/live/suny";
int in_w = 864, in_h = 480;       // fixed capture resolution — must match the Java side
int count = 0;                    // running PTS counter, in frames (codec time base 1/20)
// Output container format (cached from ofmt_ctx->oformat)
AVOutputFormat *ofmt = NULL;
// Output AVFormatContext for the RTMP/FLV stream
AVFormatContext *ofmt_ctx = NULL;
int64_t start_time;
/*
 * Initialize the H.264 encoder and open the RTMP/FLV output.
 * output_jstr: RTMP publish URL, e.g. "rtmp://192.168.1.101:1935/live/suny".
 * Returns 1 on success, -1 on any failure.
 */
JNIEXPORT jint JNICALL Java_com_cpi_ffmpeg_FFMpegLib_getVersion(JNIEnv *env,
		jclass jclass, jstring output_jstr) {
	char output_str[500] = { 0 };
	const char *url = (*env)->GetStringUTFChars(env, output_jstr, NULL);
	if (!url) {
		return -1;
	}
	/* Bounded copy — the old sprintf could overflow on a long URL. */
	snprintf(output_str, sizeof(output_str), "%s", url);
	/* Old code never released the UTF chars: a JNI leak per call. */
	(*env)->ReleaseStringUTFChars(env, output_jstr, url);

	av_register_all();
	avcodec_register_all();
	/* Required before any network protocol (rtmp://) can be opened. */
	avformat_network_init();

	/* RTMP carries FLV; note AAC inside FLV is not ADTS-framed. */
	avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", output_str);
	if (!ofmt_ctx) {
		return -1; /* old code returned 'ret' here while it was still uninitialized */
	}
	ofmt = ofmt_ctx->oformat;

	pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
	if (!pCodec) {
		printf("Codec not found\n");
		return -1;
	}
	pCodecCtx = avcodec_alloc_context3(pCodec);
	if (!pCodecCtx) {
		printf("Could not allocate video codec context\n");
		return -1;
	}
	pCodecCtx->bit_rate = 400000;
	pCodecCtx->width = in_w;
	pCodecCtx->height = in_h;
	pCodecCtx->time_base.num = 1; /* 20 fps codec time base */
	pCodecCtx->time_base.den = 20;
	pCodecCtx->gop_size = 10;
	/* B-frames reorder output (dts != pts); combined with the simple
	 * frame-counter PTS in Encoding() they produced non-monotonic
	 * timestamps and defeat tune=zerolatency — disable them for live. */
	pCodecCtx->max_b_frames = 0;
	pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
	av_opt_set(pCodecCtx->priv_data, "preset", "superfast", 0);
	av_opt_set(pCodecCtx->priv_data, "tune", "zerolatency", 0);
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		printf("Could not open codec\n");
		return -1;
	}
	if ((fp_out = fopen(filename_out, "wb")) == NULL) {
		return -1;
	}
	y_size = pCodecCtx->width * pCodecCtx->height;

	AVStream *out_stream = avformat_new_stream(ofmt_ctx, pCodecCtx->codec);
	if (!out_stream) {
		return -1;
	}
	if (avcodec_copy_context(out_stream->codec, pCodecCtx) < 0) {
		return -1;
	}
	out_stream->codec->codec_tag = 0;
	if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
		out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;

	/* Open the output URL (rtmp is not AVFMT_NOFILE, so this runs). */
	if (!(ofmt->flags & AVFMT_NOFILE)) {
		ret = avio_open(&ofmt_ctx->pb, output_str, AVIO_FLAG_WRITE);
		if (ret < 0) {
			printf("Could not open output URL\n");
			return -1;
		}
	}
	/* Write the FLV header; old code ignored this result. */
	ret = avformat_write_header(ofmt_ctx, NULL);
	if (ret < 0) {
		printf("Error writing header\n");
		return -1;
	}
	return 1;
}
/*
 * Encode one planar YUV420 frame and mux it onto the RTMP stream.
 * yuvdata: y_size*3/2 bytes; layout assumed Y plane, then the two chroma
 *          planes in the order the original code used (V at +y_size,
 *          U at +y_size*5/4, i.e. YV12-style) — TODO confirm against the
 *          Java conversion code.
 * Returns 1 on success (frame consumed, possibly buffered), -1 on error.
 */
JNIEXPORT jint JNICALL Java_com_cpi_ffmpeg_FFMpegLib_Encoding(JNIEnv *env,
		jclass jclass, jbyteArray yuvdata) {
	jbyte *yuv420sp = (*env)->GetByteArrayElements(env, yuvdata, 0);
	int result = 1;

	pFrame = av_frame_alloc();
	if (!pFrame) {
		printf("Could not allocate video frame\n");
		(*env)->ReleaseByteArrayElements(env, yuvdata, yuv420sp, 0);
		return -1;
	}
	pFrame->format = pCodecCtx->pix_fmt;
	pFrame->width = pCodecCtx->width;
	pFrame->height = pCodecCtx->height;

	/* Point the frame directly at the caller's buffer. The old code
	 * called av_image_alloc() and then immediately overwrote the plane
	 * pointers — leaking a full frame buffer on every single call. */
	pFrame->data[0] = (uint8_t *) yuv420sp;                  /* Y */
	pFrame->data[2] = (uint8_t *) yuv420sp + y_size;         /* chroma plane 1 */
	pFrame->data[1] = (uint8_t *) yuv420sp + y_size * 5 / 4; /* chroma plane 2 */
	pFrame->linesize[0] = pCodecCtx->width;
	pFrame->linesize[1] = pCodecCtx->width / 2;
	pFrame->linesize[2] = pCodecCtx->width / 2;
	pFrame->pts = count++;

	av_init_packet(&pkt);
	pkt.data = NULL; /* packet data will be allocated by the encoder */
	pkt.size = 0;

	ret = avcodec_encode_video2(pCodecCtx, &pkt, pFrame, &got_output);
	if (ret < 0) {
		printf("Error encoding frame\n");
		result = -1;
	} else if (got_output) {
		framecnt++;
		/* Convert from the codec time base (1/20) to the FLV stream
		 * time base before muxing — the missing-timestamp issue the
		 * original left as a TODO. */
		av_packet_rescale_ts(&pkt, pCodecCtx->time_base,
				ofmt_ctx->streams[0]->time_base);
		if (av_write_frame(ofmt_ctx, &pkt) < 0) {
			result = -1;
		}
		/* Old code returned straight out of this branch on success,
		 * skipping all of the cleanup below. */
		av_free_packet(&pkt);
	}

	/* Plane pointers borrow yuv420sp; only the frame struct is ours. */
	av_frame_free(&pFrame);
	(*env)->ReleaseByteArrayElements(env, yuvdata, yuv420sp, 0);
	return result;
}
/*
 * Drain the encoder, finalize the RTMP stream, and release everything
 * set up in getVersion().
 * Returns 0 on success, -1 if flushing the encoder fails.
 */
JNIEXPORT jint JNICALL Java_com_cpi_ffmpeg_FFMpegLib_CloseVideo(JNIEnv *env,
		jclass jclass) {
	/* Flush delayed frames BEFORE writing the trailer: the old code wrote
	 * the trailer and freed ofmt_ctx first, so every buffered frame was
	 * lost to the stream and the flush loop ran against torn-down state. */
	got_output = 1;
	while (got_output) {
		ret = avcodec_encode_video2(pCodecCtx, &pkt, NULL, &got_output);
		if (ret < 0) {
			printf("Error encoding frame\n");
			return -1;
		}
		if (got_output) {
			printf("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n",
					pkt.size);
			fwrite(pkt.data, 1, pkt.size, fp_out);
			/* Also deliver the flushed frame to the live stream. */
			av_packet_rescale_ts(&pkt, pCodecCtx->time_base,
					ofmt_ctx->streams[0]->time_base);
			av_write_frame(ofmt_ctx, &pkt);
			av_free_packet(&pkt);
		}
	}

	av_write_trailer(ofmt_ctx);
	if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
		avio_close(ofmt_ctx->pb);
	avformat_free_context(ofmt_ctx);
	ofmt_ctx = NULL;

	fclose(fp_out);
	avcodec_close(pCodecCtx);
	av_free(pCodecCtx);
	pCodecCtx = NULL;
	/* pFrame is allocated and freed per call inside Encoding(); the old
	 * av_freep(&pFrame->data[0]) / av_frame_free(&pFrame) here was a
	 * use-after-free / double free. */
	return 0;
}
显示效果: