FFmpeg is hard to build on Windows, and if you develop entirely on Ubuntu the Android emulator runs very slowly there.
So the work is split:
run the x86 Android emulator on Windows,
and build x86 FFmpeg on Ubuntu.
x86 is chosen because Intel's virtualization technology lets the x86 Android emulator run about as fast as a real device.
My host machine runs Windows 7 and the virtual machine runs Ubuntu 10.04. A 64-bit OS is preferable; I previously hit errors running an AVD on 32-bit Ubuntu. A recent Ubuntu release is also preferable so that gcc is up to date: on an earlier project I had to build the latest gcc myself because a third-party library required a newer version, and a toolchain that works today can still cause problems later.
【Running the SDL 2.0 + FFmpeg example in the VM】
Download the FFmpeg source, extract it with tar xf, and rename the directory to ffmpeg-ubuntu.
Download the SDL source, extract it with tar zxvf, rename the directory to sdl2, then build and install both (a sketch follows).
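The original text does not show the desktop build step itself; a minimal sketch, assuming both libraries are installed to /usr/local (which is why /usr/local/lib shows up in the ld.so.conf step later). If the compiler cannot find the headers through the Makefile's INCS paths below, point INCS at /usr/local/include instead:
# FFmpeg for the desktop test (shared libs so the Makefile can link -lavformat etc.)
cd ffmpeg-ubuntu
./configure --enable-shared
make
sudo make install
# SDL2 for the desktop test
cd ../sdl2
./configure
make
sudo make install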
Create a test directory, and inside it create VideoPlayer.h with the following content:
#ifndef _VideoPlayer_H_
#define _VideoPlayer_H_

#define __STDC_CONSTANT_MACROS

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "SDL.h"

typedef struct VideoPlayer
{
    // ffmpeg
    struct AVFormatContext *pFormatContext;
    int videoIndex;
    struct AVCodecContext *pCodecContext;
    struct AVCodec *pCodec;
    struct AVFrame *pFrame;
    struct AVFrame *pFrameYUV;
    uint8_t *pOutBuf;
    struct SwsContext *pSwsContext;
    struct AVPacket *pPacket;

    // sdl
    struct SDL_Window *pWindow;
    struct SDL_Renderer *pRenderer;
    struct SDL_Texture *pTexture;
    struct SDL_Rect rect;
} VideoPlayer;

VideoPlayer *VideoPlayer_Create();
int VideoPlayer_Destroy(VideoPlayer *pVideoPlayer);
int VideoPlayer_Init(VideoPlayer *pVideoPlayer, const char *path, int width, int height);
int VideoPlayer_Frame(VideoPlayer *pVideoPlayer);
int VideoPlayer_Shutdown(VideoPlayer *pVideoPlayer);

#endif
Create VideoPlayer.c with the following content:
#include "VideoPlayer.h" #include <android/log.h> #define LOG_TAG "video" // #undef LOG #define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__) VideoPlayer *VideoPlayer_Create() { VideoPlayer *pVideoPlayer; pVideoPlayer = (VideoPlayer*)malloc(sizeof(VideoPlayer)); pVideoPlayer->pFormatContext = NULL; pVideoPlayer->pCodecContext = NULL; pVideoPlayer->pCodec = NULL; pVideoPlayer->pFrame = NULL; pVideoPlayer->pFrameYUV = NULL; pVideoPlayer->pOutBuf = NULL; pVideoPlayer->pSwsContext = NULL; pVideoPlayer->pPacket = NULL; pVideoPlayer->pWindow = NULL; pVideoPlayer->pRenderer = NULL; pVideoPlayer->pTexture = NULL; return pVideoPlayer; } int VideoPlayer_Destroy(VideoPlayer *pVideoPlayer) { if(pVideoPlayer == NULL) { return -1; } if(pVideoPlayer->pSwsContext != NULL) { sws_freeContext(pVideoPlayer->pSwsContext); } if(pVideoPlayer->pFrameYUV != NULL) { av_frame_free(&pVideoPlayer->pFrameYUV); } if(pVideoPlayer->pFrame != NULL) { av_frame_free(&pVideoPlayer->pFrame); } if(pVideoPlayer->pCodecContext != NULL) { avcodec_close(pVideoPlayer->pCodecContext); } if(pVideoPlayer->pFormatContext != NULL) { avformat_close_input(&pVideoPlayer->pFormatContext); } free(pVideoPlayer); return 0; } int VideoPlayer_Init(VideoPlayer *pVideoPlayer, const char *path, int width, int height) { int i, videoIndex, screen_w, screen_h; if(pVideoPlayer == NULL) { return -1; } av_register_all(); avformat_network_init(); LOGD("avformat_open_input rtsp"); if(avformat_open_input(&pVideoPlayer->pFormatContext, path, NULL, NULL) != 0) { LOGD("Couldn't open input stream"); return -1; } LOGD("avformat_find_stream_info"); if(avformat_find_stream_info(pVideoPlayer->pFormatContext, NULL) < 0){ LOGD("Couldn't find stream information"); return -1; } pVideoPlayer->videoIndex = -1; for(i=0; i<pVideoPlayer->pFormatContext->nb_streams; i++) { if(pVideoPlayer->pFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { pVideoPlayer->videoIndex=i; break; } } if(pVideoPlayer->videoIndex == -1) { LOGD("Didn't find a video stream"); return -1; } LOGD("avcodec_find_decoder"); pVideoPlayer->pCodecContext = pVideoPlayer->pFormatContext->streams[pVideoPlayer->videoIndex]->codec; pVideoPlayer->pCodec = avcodec_find_decoder(pVideoPlayer->pCodecContext->codec_id); if(pVideoPlayer->pCodec == NULL) { LOGD("Codec not found"); return -1; } LOGD("avcodec_open2"); if(avcodec_open2(pVideoPlayer->pCodecContext, pVideoPlayer->pCodec, NULL) < 0) { LOGD("Could not open codec"); return -1; } pVideoPlayer->pFrame = av_frame_alloc(); pVideoPlayer->pFrameYUV = av_frame_alloc(); // pVideoPlayer->pOutBuf = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pVideoPlayer->pCodecContext->width, pVideoPlayer->pCodecContext->height)); // avpicture_fill((AVPicture *)pVideoPlayer->pFrameYUV, pVideoPlayer->pOutBuf, PIX_FMT_YUV420P, pVideoPlayer->pCodecContext->width, pVideoPlayer->pCodecContext->height); // // pVideoPlayer->pSwsContext = sws_getContext(pVideoPlayer->pCodecContext->width, pVideoPlayer->pCodecContext->height, pVideoPlayer->pCodecContext->pix_fmt, // pVideoPlayer->pCodecContext->width, pVideoPlayer->pCodecContext->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); // 视频纹理 pVideoPlayer->pOutBuf = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, width, height)); avpicture_fill((AVPicture *)pVideoPlayer->pFrameYUV, pVideoPlayer->pOutBuf, PIX_FMT_YUV420P, width, height); // 转换器 pVideoPlayer->pSwsContext = sws_getContext(pVideoPlayer->pCodecContext->width, pVideoPlayer->pCodecContext->height, 
pVideoPlayer->pCodecContext->pix_fmt, width, height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { LOGD( "Could not initialize SDL - %s", SDL_GetError()); return -1; } screen_w = pVideoPlayer->pCodecContext->width; screen_h = pVideoPlayer->pCodecContext->height; // pVideoPlayer->pWindow = SDL_CreateWindow("video", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h, SDL_WINDOW_OPENGL); pVideoPlayer->pWindow = SDL_CreateWindow("video", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, width, height, SDL_WINDOW_OPENGL); if(!pVideoPlayer->pWindow) { LOGD("SDL: could not create window - exiting:%s\n",SDL_GetError()); return -1; } pVideoPlayer->pRenderer = SDL_CreateRenderer(pVideoPlayer->pWindow, -1, 0); //IYUV: Y + U + V (3 planes) //YV12: Y + V + U (3 planes) // pVideoPlayer->pTexture = SDL_CreateTexture(pVideoPlayer->pRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pVideoPlayer->pCodecContext->width, pVideoPlayer->pCodecContext->height); // 表面纹理 pVideoPlayer->pTexture = SDL_CreateTexture(pVideoPlayer->pRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, width, height); pVideoPlayer->rect.x=0; pVideoPlayer->rect.y=0; // pVideoPlayer->rect.w = screen_w; // pVideoPlayer->rect.h = screen_h; pVideoPlayer->rect.w = width; pVideoPlayer->rect.h = height; return 0; } int VideoPlayer_Frame(VideoPlayer *pVideoPlayer) { long ts; int ret, getPicture; if(pVideoPlayer == NULL) { return -1; } pVideoPlayer->pPacket=(AVPacket *)av_malloc(sizeof(AVPacket)); if(av_read_frame(pVideoPlayer->pFormatContext, pVideoPlayer->pPacket) >= 0) { if(pVideoPlayer->pPacket->stream_index == pVideoPlayer->videoIndex) { ts = time(); ret = avcodec_decode_video2(pVideoPlayer->pCodecContext, pVideoPlayer->pFrame, &getPicture, pVideoPlayer->pPacket); if(ret < 0) { LOGD("Decode Error"); return -1; } LOGD("avcodec_decode_video2 %d", time() - ts); if(getPicture) { ts = time(); sws_scale(pVideoPlayer->pSwsContext, (const uint8_t* const*)pVideoPlayer->pFrame->data, pVideoPlayer->pFrame->linesize, 0, pVideoPlayer->pCodecContext->height, pVideoPlayer->pFrameYUV->data, pVideoPlayer->pFrameYUV->linesize); LOGD("sws_scale %d", time() - ts); // SDL_UpdateYUVTexture(pVideoPlayer->pTexture, &pVideoPlayer->rect, // pVideoPlayer->pFrameYUV->data[0], pVideoPlayer->pFrameYUV->linesize[0], // pVideoPlayer->pFrameYUV->data[1], pVideoPlayer->pFrameYUV->linesize[1], // pVideoPlayer->pFrameYUV->data[2], pVideoPlayer->pFrameYUV->linesize[2]); ts = time(); SDL_UpdateTexture(pVideoPlayer->pTexture, &pVideoPlayer->rect, pVideoPlayer->pFrameYUV->data[0], pVideoPlayer->pFrameYUV->linesize[0] ); LOGD("SDL_UpdateTexture %d", time() - ts); ts = time(); SDL_RenderClear(pVideoPlayer->pRenderer); SDL_RenderCopy(pVideoPlayer->pRenderer, pVideoPlayer->pTexture, NULL, &pVideoPlayer->rect); SDL_RenderPresent(pVideoPlayer->pRenderer); LOGD("Render %d", time() - ts); SDL_Delay(42); } } av_free_packet(pVideoPlayer->pPacket); } // flush frames remained in codec // while(1) // { // ret = avcodec_decode_video2(pVideoPlayer->pCodecContext, pVideoPlayer->pFrame, &getPicture, pVideoPlayer->pPacket); // if (ret < 0) // break; // if (!getPicture) // break; // // sws_scale(pVideoPlayer->pSwsContext, (const uint8_t* const*)pVideoPlayer->pFrame->data, pVideoPlayer->pFrame->linesize, 0, pVideoPlayer->pCodecContext->height, // pVideoPlayer->pFrameYUV->data, pVideoPlayer->pFrameYUV->linesize); // // SDL_UpdateTexture(pVideoPlayer->pTexture, &pVideoPlayer->rect, 
pVideoPlayer->pFrameYUV->data[0], pVideoPlayer->pFrameYUV->linesize[0] ); // SDL_RenderClear(pVideoPlayer->pRenderer); // SDL_RenderCopy(pVideoPlayer->pRenderer, pVideoPlayer->pTexture, NULL, &pVideoPlayer->rect); // SDL_RenderPresent(pVideoPlayer->pRenderer); // // SDL_Delay(40); // } return 0; } int VideoPlayer_Shutdown(VideoPlayer *pVideoPlayer) { if(pVideoPlayer == NULL) { return -1; } SDL_Quit(); return 0; }
Create main.c with the following content:
#include "VideoPlayer.h" int main(int argc, char* argv[]) { Uint8 done = 0; SDL_Event event; VideoPlayer *pVideoPlayer = VideoPlayer_Create(); VideoPlayer_Init(pVideoPlayer, "rtsp://192.168.1.155:8557/h264", 480, 320); /* Main render loop */ while(!done) { /* Check for events */ while(SDL_PollEvent(&event)) { if(event.type == SDL_QUIT || event.type == SDL_KEYDOWN || event.type == SDL_FINGERDOWN) { done = 1; } } VideoPlayer_Frame(pVideoPlayer); } VideoPlayer_Shutdown(pVideoPlayer); VideoPlayer_Destroy(pVideoPlayer); return 0; }
Create a Makefile with the following content:
CC=g++
GCC=gcc

# VideoPlayer.c must be listed too, or the link step cannot resolve the VideoPlayer_* functions
SRC=main.c VideoPlayer.c

OBJSDIR=objs/x86/__
COBJS=$(patsubst %.c, $(OBJSDIR)/%.o, $(filter %.c, $(SRC)))
CPPOBJS=$(patsubst %.cpp, $(OBJSDIR)/%.o, $(filter %.cpp, $(SRC)))

INCS=-I. -I../ffmpeg-ubuntu/include -I../sdl2/include
CFLAGS=-c -g
CPPFLAGS=
LFLAGS=-lSDL2 -lSDL2main -lavformat -lavcodec -lavutil -lswscale -lswresample

TARGET=./bin

all: $(TARGET)

$(TARGET): $(COBJS) $(CPPOBJS)
	$(CC) $(COBJS) $(CPPOBJS) -o $(TARGET) $(LFLAGS)

$(COBJS): $(OBJSDIR)/%.o: %.c
	@test -d $(dir $@) || mkdir -p $(dir $@)
	$(GCC) $(INCS) $(CFLAGS) $< -o $@

$(CPPOBJS): $(OBJSDIR)/%.o: %.cpp
	@test -d $(dir $@) || mkdir -p $(dir $@)
	$(CC) $(INCS) $(CFLAGS) $< -o $@

rebuild: clean all

test:
	@echo $(COBJS) $(CPPOBJS)

clean:
	rm -f $(TARGET) $(COBJS) $(CPPOBJS)
Build and run it, and you can watch the RTSP stream.
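For reference, the build-and-run step looks like this, assuming the RTSP URL hard-coded in main.c points at a reachable server (the Makefile's TARGET is ./bin):
cd test
make
./bin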
If some shared libraries are reported as missing,
edit /etc/ld.so.conf (with sudo, e.g. sudo vi /etc/ld.so.conf),
add the line /usr/local/lib,
sudo ldconfig
If you get the error below,
Could not initialize SDL library: No available video device
install the X11 development packages:
sudo apt-get install libx11-dev
sudo apt-get install xorg-dev
then go back into the sdl2 directory and rebuild:
make clean
./configure
make
sudo make install
【Setting up the Android environment on Windows】
Install the JDK and set the JAVA_HOME environment variable.
Install Eclipse.
Install the Android SDK.
Install the Android NDK and set the NDK_ROOT environment variable.
Install ADT offline (the online install is very slow).
Install Intel's virtualization driver HAXM, which makes the x86 AVD start and run fast. SDL itself builds for x86 out of the box, but most FFmpeg articles online only explain how to build the ARM version; building the x86 version is quite different.
I did not install Cygwin: for one thing I already have the Ubuntu VM, and for another the latest NDK (r10) no longer needs it.
【Running the SDL 2.0 example on Android】
Copy android-project out of the SDL source tree and rename it video-x86.
Under its jni directory, create a folder named SDL and copy SDL's include, src and Android.mk into it.
Then create bmp.c under the src directory (jni/src, the application source folder of the template), with the following content:
#include <stdlib.h>
#include <stdio.h>
#include <time.h>

#include "SDL.h"

typedef struct Sprite
{
    SDL_Texture* texture;
    Uint16 w;
    Uint16 h;
} Sprite;

/* Adapted from SDL's testspriteminimal.c */
Sprite LoadSprite(const char* file, SDL_Renderer* renderer)
{
    Sprite result;
    result.texture = NULL;
    result.w = 0;
    result.h = 0;

    SDL_Surface* temp;

    /* Load the sprite image */
    temp = SDL_LoadBMP(file);
    if (temp == NULL)
    {
        fprintf(stderr, "Couldn't load %s: %s\n", file, SDL_GetError());
        return result;
    }
    result.w = temp->w;
    result.h = temp->h;

    /* Create texture from the image */
    result.texture = SDL_CreateTextureFromSurface(renderer, temp);
    if (!result.texture)
    {
        fprintf(stderr, "Couldn't create texture: %s\n", SDL_GetError());
        SDL_FreeSurface(temp);
        return result;
    }
    SDL_FreeSurface(temp);

    return result;
}

void draw(SDL_Window* window, SDL_Renderer* renderer, const Sprite sprite)
{
    int w, h;
    SDL_GetWindowSize(window, &w, &h);
    SDL_Rect destRect = {w/2 - sprite.w/2, h/2 - sprite.h/2, sprite.w, sprite.h};
    /* Blit the sprite onto the screen */
    SDL_RenderCopy(renderer, sprite.texture, NULL, &destRect);
}

int main(int argc, char *argv[])
{
    SDL_Window *window;
    SDL_Renderer *renderer;

    if(SDL_CreateWindowAndRenderer(0, 0, 0, &window, &renderer) < 0)
        exit(2);

    Sprite sprite = LoadSprite("image.bmp", renderer);
    if(sprite.texture == NULL)
        exit(2);

    /* Main render loop */
    Uint8 done = 0;
    SDL_Event event;
    while(!done)
    {
        /* Check for events */
        while(SDL_PollEvent(&event))
        {
            if(event.type == SDL_QUIT || event.type == SDL_KEYDOWN || event.type == SDL_FINGERDOWN)
            {
                done = 1;
            }
        }

        /* Draw a gray background */
        SDL_SetRenderDrawColor(renderer, 0xA0, 0xA0, 0xA0, 0xFF);
        SDL_RenderClear(renderer);

        draw(window, renderer, sprite);

        /* Update the screen! */
        SDL_RenderPresent(renderer);

        SDL_Delay(10);
    }

    exit(0);
}
Download this bmp, http://www.dinomage.com/wp-content/uploads/2013/01/image.bmp, put it under assets, build and deploy (sketched below), and you can see the image in the emulator.
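The document builds and deploys from Eclipse; for the native side, the usual command is ndk-build run from the project root. A sketch, assuming NDK_ROOT points at your NDK install and that jni/src/Android.mk lists bmp.c in LOCAL_SRC_FILES (the template ships with a placeholder source name there):
cd video-x86
$NDK_ROOT/ndk-build        # builds libSDL2.so and libmain.so into libs/<abi>/
After that, build and run the Java side from Eclipse as usual.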
【Building x86 FFmpeg in the Ubuntu VM】
Extract another copy of the FFmpeg source, rename the directory ffmpeg-x86, and create x86.sh inside it:
#!/bin/bash

NDK=/home/mynamepfd/android/ndk8
PLATFORM=$NDK/platforms/android-14/arch-x86
PREBUILT=$NDK/toolchains/x86-4.6/prebuilt/linux-x86

function build_one
{
    ./configure \
        --target-os=linux \
        --prefix=$PREFIX \
        --enable-cross-compile \
        --extra-libs="-lgcc" \
        --arch=x86 \
        --cc=$PREBUILT/bin/i686-linux-android-gcc \
        --cross-prefix=$PREBUILT/bin/i686-linux-android- \
        --nm=$PREBUILT/bin/i686-linux-android-nm \
        --sysroot=$PLATFORM \
        --extra-cflags=" -O3 -fpic -DANDROID -DHAVE_SYS_UIO_H=1 -fasm -Wno-psabi -fno-short-enums -fno-strict-aliasing -finline-limit=300 $OPTIMIZE_CFLAGS " \
        --disable-static \
        --enable-shared \
        --extra-ldflags="-Wl,-rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib -nostdlib -lc -lm" \
        --disable-asm \
        --disable-yasm \
        $ADDITIONAL_CONFIGURE_FLAG
    make clean
    make -j4 install
}

CPU=x86
OPTIMIZE_CFLAGS="-march=atom -ffast-math -msse3 -mfpmath=sse"
PREFIX=$(pwd)/android/$CPU
ADDITIONAL_CONFIGURE_FLAG=
build_one
NDK, PLATFORM and PREBUILT must match your own installation, and the other parameters have to be exactly right, otherwise you are in for a lot of pain.
Earlier I got --cross-prefix wrong: the real problem was that gcc could not be found, yet configure kept reporting "xxx is unable to create an executable file".
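A quick way to catch this kind of mistake before running x86.sh is to check that the paths the script points at actually exist, using the same NDK and PREBUILT values as in the script (adjust to your install):
NDK=/home/mynamepfd/android/ndk8
PREBUILT=$NDK/toolchains/x86-4.6/prebuilt/linux-x86
ls $NDK/platforms/android-14/arch-x86/usr/lib            # the sysroot libraries should be here
$PREBUILT/bin/i686-linux-android-gcc --version           # should print the cross-compiler version
If the last command fails with "No such file or directory", that is the real cause behind configure's "unable to create an executable file" message.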
If you build as-is, the resulting files are named like libavcodec.so.xxx, with the version number after .so, and Android cannot load libraries named that way.
So edit configure and replace the following four lines in that file,
SLIBNAME_WITH_MAJOR='$(SLIBNAME).$(LIBMAJOR)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB)"$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_VERSION)'
SLIB_INSTALL_LINKS='$(SLIBNAME_WITH_MAJOR) $(SLIBNAME)'
with these:
SLIBNAME_WITH_MAJOR='$(SLIBPREF)$(FULLNAME)-$(LIBMAJOR)$(SLIBSUF)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB)"$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_MAJOR)'
SLIB_INSTALL_LINKS='$(SLIBNAME)'
The build produces a lot of large shared libraries; packaged with the program below, the APK is well over ten megabytes, so trim the build to what you actually need (a sketch follows).
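As an illustration only (the component list here is an assumption, not what the original build used), configure switches along these lines cut the libraries down when all you need is H.264 over RTSP; they would be appended to the ./configure call in x86.sh:
--disable-everything \
--enable-protocol=rtp --enable-protocol=udp --enable-protocol=tcp \
--enable-demuxer=rtsp \
--enable-decoder=h264 --enable-parser=h264 \
--disable-doc --disable-programs
After trimming, verify that the stream still opens, since --disable-everything also drops components that others depend on unless they are explicitly re-enabled.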
Under jni, create an ffmpeg folder, copy the include and lib directories from the x86 output (android/x86) into it, and create an Android.mk with the following content:
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavcodec
LOCAL_SRC_FILES:= lib/libavcodec-56.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavdevice
LOCAL_SRC_FILES:= lib/libavdevice-56.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavfilter
LOCAL_SRC_FILES:= lib/libavfilter-5.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavformat
LOCAL_SRC_FILES:= lib/libavformat-56.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil
LOCAL_SRC_FILES:= lib/libavutil-54.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libswresample
LOCAL_SRC_FILES:= lib/libswresample-1.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libswscale
LOCAL_SRC_FILES:= lib/libswscale-3.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
This .mk declares our FFmpeg .so files as prebuilt modules so the Android build packages them and the app can load them.
Modify SDLActivity.java:
    // Load the .so
    static {
        System.loadLibrary("SDL2");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avformat-56");
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("swscale-3");
        //System.loadLibrary("SDL2_image");
        //System.loadLibrary("SDL2_mixer");
        //System.loadLibrary("SDL2_net");
        //System.loadLibrary("SDL2_ttf");
        System.loadLibrary("main");
    }
Copy VideoPlayer.h, VideoPlayer.c and main.c into jni/src and modify its Android.mk:
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := main
SDL_PATH := ../SDL
FFMPEG_PATH := ../ffmpeg
LOCAL_C_INCLUDES := $(LOCAL_PATH)/$(SDL_PATH)/include \
$(LOCAL_PATH)/$(FFMPEG_PATH)/include
# Add your application source files here...
LOCAL_SRC_FILES := $(SDL_PATH)/src/main/android/SDL_android_main.c \
main.c \
VideoPlayer.c
LOCAL_SHARED_LIBRARIES := SDL2 avcodec avdevice avfilter avformat avutil swresample swscale
LOCAL_LDLIBS := -lGLESv1_CM -lGLESv2 -llog
include $(BUILD_SHARED_LIBRARY)
Modify AndroidManifest.xml to add the network permission (android.permission.INTERNET), and then you can see the RTSP stream in the emulator!
For a real (ARM) device, build FFmpeg with arm.sh instead; its content is as follows:
#!/bin/bash

NDK=/home/mynamepfd/android/ndk8
PLATFORM=$NDK/platforms/android-14/arch-arm
PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86

function build_one
{
    ./configure \
        --target-os=linux \
        --prefix=$PREFIX \
        --enable-cross-compile \
        --extra-libs="-lgcc" \
        --arch=arm \
        --cc=$PREBUILT/bin/arm-linux-androideabi-gcc \
        --cross-prefix=$PREBUILT/bin/arm-linux-androideabi- \
        --nm=$PREBUILT/bin/arm-linux-androideabi-nm \
        --sysroot=$PLATFORM \
        --extra-cflags=" $CFLAGS $OPTIMIZE_CFLAGS " \
        --disable-static \
        --enable-shared \
        --extra-ldflags=" $EXTRA_LDFLAGS " \
        --disable-asm \
        --disable-yasm
    make clean
    make -j4 install
}

CPU=arm
PREFIX=$(pwd)/android/$CPU
CFLAGS="-O3 -Wall -mthumb -pipe -fpic -fasm \
    -finline-limit=300 -ffast-math \
    -fstrict-aliasing -Werror=strict-aliasing \
    -fmodulo-sched -fmodulo-sched-allow-regmoves \
    -Wno-psabi -Wa,--noexecstack \
    -D__ARM_ARCH_5__ -D__ARM_ARCH_5E__ \
    -D__ARM_ARCH_5T__ -D__ARM_ARCH_5TE__ \
    -DANDROID -DNDEBUG"
EXTRA_CFLAGS="-march=armv7-a -mfpu=neon \
    -mfloat-abi=softfp -mvectorize-with-neon-quad"
EXTRA_LDFLAGS="-Wl,--fix-cortex-a8"
build_one
If you get an error like "symbol signal xxx",
edit Application.mk and add APP_PLATFORM := android-10.
With that, the RTSP stream plays on a real device!
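If you prefer the command line to Eclipse for installing on the device, adb works too (the apk name below is hypothetical; use whatever your build actually produced):
adb install -r bin/video-x86-debug.apk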
To play local video files, put them on the SD card, open them with a path like /sdcard/xxx, and add the permission to read external storage as well!
For the emulator, you can add files to its SD card from the DDMS perspective in Eclipse.
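Pushing from the command line works as well, for both the emulator and a real device (the file name here is hypothetical):
adb push test.mp4 /sdcard/test.mp4
Then pass "/sdcard/test.mp4" to VideoPlayer_Init in main.c instead of the RTSP URL.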
End