sws_getContext(...)
创建像素格式转换的上下文,每次会创建新的空间。多线程建议使用。
struct SwsContext *sws_getCachedContext(...)
创建像素格式转换的上下文,不会每次创建新的空间。会根据传入的上下文在缓冲里面查找,如果输入输出参数没有变化,就会返回缓冲中已经存在的swsContext。如果发生了变化,则会清除掉之前创建的swsContext,重新创建一个新的。
// Returns a cached conversion context, or creates a new one when the
// parameters changed. The original snippet dropped the return type, so it
// was not a valid declaration as printed.
struct SwsContext *sws_getCachedContext(
struct SwsContext *context,      // context from a previous call, or NULL the first time
int srcW, int srcH,              // source width / height
enum AVPixelFormat srcFormat,    // source pixel format
int dstW, int dstH,              // destination width / height
enum AVPixelFormat dstFormat,    // destination pixel format
int flags,                       // scaling algorithm: one of the SWS_* flags
SwsFilter *srcFilter,            // optional source filter, usually NULL
SwsFilter *dstFilter,            // optional destination filter, usually NULL
const double *param);            // extra algorithm parameters, usually NULL
参数说明:
// Scaling-algorithm flags for sws_getContext / sws_getCachedContext
// (see libswscale/swscale.h):
#define SWS_FAST_BILINEAR 1
// bilinear interpolation
#define SWS_BILINEAR 2
// bicubic interpolation (good quality default)
#define SWS_BICUBIC 4
// experimental
#define SWS_X 8
// nearest neighbor
#define SWS_POINT 0x10
// area averaging
#define SWS_AREA 0x20
// bicubic for luma, bilinear for chroma
#define SWS_BICUBLIN 0x40
后面还有两个参数是做过滤器用的,一般用不到,传NULL,最后一个参数是跟flag算法相关,也可以传NULL。
int sws_scale(...)
转换数据
// Converts/scales a horizontal slice of the source image into dst.
// Returns the height of the output slice.
int sws_scale(struct SwsContext *c,
const uint8_t *const srcSlice[],  // source plane data pointers
const int srcStride[],            // source plane strides, in bytes
int srcSliceY, int srcSliceH,     // first source row and number of rows to process
uint8_t *const dst[],             // destination plane data pointers
const int dstStride[]);           // destination plane strides, in bytes
参数说明:srcSlice/srcStride 是输入图像各平面的数据指针和每行字节数(即 frame->data 和 frame->linesize);srcSliceY 和 srcSliceH 指定要处理的起始行和行数(整帧转换时传 0 和 frame->height);dst/dstStride 是输出缓冲区的数据指针和每行字节数;返回值为输出图像的高度。
// Frees the context and everything associated with it; safe to pass NULL.
void sws_freeContext(struct SwsContext *swsContext);
清理swsContext
sws_freeContext(swsContext);
swsContext = null; //记得把swsContext置为空
// Initialize the pixel-format conversion context
SwsContext *vctx = NULL;
int outWidth = 1280;
int outHeight = 720;
// RGBA output buffer: 4 bytes per pixel, sized from the OUTPUT dimensions.
// (The original hard-coded 1920*1080*4, which wastes memory at 1280x720
// and would overflow if the output ever exceeded 1920x1080.)
char *rgb = new char[outWidth * outHeight * 4];
// Get (or reuse) the cached conversion context
vctx = sws_getCachedContext(
vctx,
frame->width,
frame->height,
(AVPixelFormat)frame->format,
outWidth,
outHeight,
AV_PIX_FMT_RGBA,
SWS_FAST_BILINEAR,
0, 0, 0);
if (!vctx)
{
LOGE("sws_getCachedContext failed!");
} else
{
// Convert one full frame into a single packed RGBA plane
uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
data[0] = (uint8_t *)rgb;
int lines[AV_NUM_DATA_POINTERS] = {0};
lines[0] = outWidth * 4; // bytes per output row (RGBA = 4 bytes/pixel)
int h = sws_scale(
vctx,
(const uint8_t **)frame->data,
frame->linesize,
0,
frame->height,
data, lines
);
LOGI("sws_scale = %d", h); // h is the output slice height
}
注意:
像素格式的相关函数包含在 libswscale.so 库中
同时在代码中包含头文件 #include &lt;libswscale/swscale.h&gt;
经过上面的格式转换之后,我们就可以用GLSurfaceView来绘制视频画面了。
在cmake中添加android库
# Link the JNI library against the Android system library and the
# prebuilt FFmpeg components (decode, demux, util, resample, scale).
target_link_libraries( # Specifies the target library.
native-lib
android
avcodec avformat avutil swresample swscale
# Links the target library to the log library
# included in the NDK.
${log-lib})
引入头文件
#include &lt;android/native_window.h&gt;
#include &lt;android/native_window_jni.h&gt;
native代码参考
//显示窗口初始化
ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
ANativeWindow_setBuffersGeometry(nativeWindow, outWidth, outHeight, WINDOW_FORMAT_RGBA_8888);
ANativeWindow_Buffer nativeWindowBuffer; //后备缓冲
//在surfaceView上显示图像
ANativeWindow_lock(nativeWindow, &nativeWindowBuffer, 0);
uint8_t *dst = (uint8_t *)nativeWindowBuffer.bits;
//因为格式为RGB8888,所以像素点的个数为w*h*4
memcpy(dst, rgb, outWidth*outHeight*4);
ANativeWindow_unlockAndPost(nativeWindow);
java代码参考
// Plays a video file by handing the view's Surface to native code, which
// decodes and renders via ANativeWindow.
// NOTE(review): this extends GLSurfaceView but never calls setRenderer(),
// and the lifecycle overrides below do not call super — rendering happens
// entirely in native code. A plain SurfaceView looks like a better fit;
// confirm no callers rely on the GLSurfaceView base before changing it.
public class TestPlay extends GLSurfaceView implements Runnable,SurfaceHolder.Callback {
public TestPlay(Context context) {
super(context);
}
public TestPlay(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
public void run() {
// Runs on the worker thread started in surfaceCreated.
// NOTE(review): the input path is hard-coded for the demo.
open("/sdcard/test1.mp4", getHolder().getSurface());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// Start decoding once the Surface exists.
new Thread(this).start();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
// Implemented in native-lib: opens the file and renders into the surface.
public native void open(String path, Object surface);
}
最后在xml文件中引用TestPlay控件即可显示出图像。