HLS System Quick Deployment Guide

Author: 阿宝
Version: 1.0.0
Date: 2015-06-10

Introduction

In an open-source setup there are two common ways to build an HLS live-streaming system: nginx + nginx-rtmp-module, or SRS. This guide uses the former, together with FFmpeg.
This article was put together by supplementing and refining 武爱敏's blog post (see References); thanks to the original author.

Environment

OS: CentOS 6.6 x64
Software:
yasm-1.3.0
x264-201506
lame-3.99.5
libogg-1.3.2
libvorbis-1.3.5
libvpx-201506
faad2-2.7
faac-1.28
xvidcore-1.3.2
ffmpeg-201506
nginx-1.8.0
nginx-rtmp-module-1.1.7
openssl-0.9.8zf

Simplified Build and Install

To simplify the installation, a one-click install package is provided (download link in the original post), access password 00a8.
It contains:
All software packages that need to be installed (with the FAAC bug already fixed)
A one-click install script (shell)
A test configuration file (nginx.conf)

su -
unzip HLSEasyInstall.zip
cd HLSEasyInstall
chmod +x *.sh
./install.sh
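
A quick sanity check after the script finishes (a sketch, assuming the script installs to the same prefixes used in the manual steps below, /opt/ffmpeg and /usr/local/nginx):

# confirm the FFmpeg and Nginx binaries are in place
/opt/ffmpeg/bin/ffmpeg -version
/usr/local/nginx/sbin/nginx -v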

Build and Install

Install dependencies

yum -y install gcc gcc-c++ glibc glibc-devel make nasm pkgconfig lib-devel openssl-devel expat-devel gettext-devel libtool mhash.x86_64 perl-Digest-SHA1.x86_64 git

Note: lib-devel may not be found; this does not affect the installation and can be ignored.

Install FFmpeg and its dependencies

Add /usr/local/lib to the system library search path

cat /etc/ld.so.conf
echo "/usr/local/lib" >> /etc/ld.so.conf
cat /etc/ld.so.conf
ldconfig

Yasm (a NASM-compatible assembler for Linux, supporting x86 and AMD64)

wget -c http://www.tortall.net/projects/yasm/releases/yasm-1.3.0.tar.gz -O yasm-1.3.0.tar.gz
tar zxvf yasm-1.3.0.tar.gz
cd yasm-1.3.0
./configure
make
make install

x264 (H.264 encoder)

git clone git://git.videolan.org/x264
#tar zxvf x264.tar.gz
cd x264
./configure --enable-shared 
make
make install

LAME (MP3 encoder)

wget -c http://downloads.sourceforge.net/project/lame/lame/3.99/lame-3.99.5.tar.gz
tar zxvf lame-3.99.5.tar.gz
cd lame-3.99.5
./configure --enable-nasm
make
make install

libogg (Ogg container library)

wget -c http://downloads.xiph.org/releases/ogg/libogg-1.3.2.tar.gz
tar zxvf libogg-1.3.2.tar.gz
cd libogg-1.3.2
./configure
make
make install

libvorbis (Vorbis audio codec library)

wget -c http://downloads.xiph.org/releases/vorbis/libvorbis-1.3.5.tar.gz
tar zxvf libvorbis-1.3.5.tar.gz
cd libvorbis-1.3.5
./configure
make
make install

libvpx (VP8/VP9 codec)

git clone http://git.chromium.org/webm/libvpx.git
#tar xcvf libvpx.tar.gz libvpx
#tar zxvf libvpx.tar.gz
cd libvpx
./configure  --enable-shared
make
make install

FAAD2 (AAC decoder)

wget -c http://downloads.sourceforge.net/faac/faad2-2.7.tar.gz
tar zxvf faad2-2.7.tar.gz
cd faad2-2.7
./configure
make
make install

FAAC (AAC encoder)

wget -c http://downloads.sourceforge.net/faac/faac-1.28.tar.gz
tar zxvf faac-1.28.tar.gz
cd faac-1.28
./configure
make
make install
[Error handling]
Compiling FAAC 1.28 may fail with:
mpeg4ip.h:126: error: new declaration ‘char* strcasestr(const char*, const char*)’

Fix:
Edit *./common/mp4v2/mpeg4ip.h*, starting at line 123 and ending at line 129.
Before:
#ifdef __cplusplus
extern "C" {
#endif
char *strcasestr(const char *haystack, const char *needle);
#ifdef __cplusplus
}
#endif

After:
#ifdef __cplusplus
extern "C++" {
#endif
const char *strcasestr(const char *haystack, const char *needle);
#ifdef __cplusplus
}
#endif
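
If you prefer not to edit the header by hand, the same change can be scripted. This is only a sketch and assumes the extern "C" block really sits at lines 123-129 as described above:

# apply the fix in place inside the faac-1.28 source tree (assumed line range 123-129)
sed -i '123,129s/extern "C"/extern "C++"/' common/mp4v2/mpeg4ip.h
sed -i '123,129s/^char \*strcasestr/const char *strcasestr/' common/mp4v2/mpeg4ip.h
# then re-run make and make install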

Xvid (MPEG-4 video codec)

wget -c http://downloads.xvid.org/downloads/xvidcore-1.3.2.tar.gz
tar zxvf xvidcore-1.3.2.tar.gz
cd xvidcore/build/generic
./configure
make
make install

FFmpeg (general-purpose audio/video toolkit)

git clone git://source.ffmpeg.org/ffmpeg
cd ffmpeg
./configure  --prefix=/opt/ffmpeg/ --enable-version3  --enable-libvpx --enable-libfaac --enable-libmp3lame  --enable-libvorbis --enable-libx264 --enable-libxvid --enable-shared --enable-gpl --enable-postproc --enable-nonfree  --enable-avfilter --enable-pthreads
make
make install

Add /opt/ffmpeg/lib to the system library search path

cat /etc/ld.so.conf
echo "/opt/ffmpeg/lib" >> /etc/ld.so.conf
cat /etc/ld.so.conf
ldconfig
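
To confirm the dynamic linker now sees the freshly installed FFmpeg libraries, you can inspect the cache (library names vary with the snapshot that was built):

# the libav*/libsw* libraries should be resolved from /opt/ffmpeg/lib
ldconfig -p | grep /opt/ffmpeg/lib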

Verify FFmpeg

/opt/ffmpeg/bin/ffmpeg
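
Running the binary with no arguments only prints the banner. To check more specifically that the encoders this guide relies on were compiled in, something like the following can be used:

# print version and build configuration
/opt/ffmpeg/bin/ffmpeg -version
# the H.264, AAC and MP3 encoders enabled above should be listed
/opt/ffmpeg/bin/ffmpeg -encoders 2>/dev/null | grep -E "libx264|libfaac|libmp3lame"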

Nginx + nginx-rtmp-module (web server and RTMP server)

yum install -y gcc gcc-c++ make openssl-devel pcre pcre-devel
wget -c http://nginx.org/download/nginx-1.8.0.tar.gz
tar xzvf nginx-1.8.0.tar.gz
wget -c https://github.com/arut/nginx-rtmp-module/archive/v1.1.7.tar.gz -O nginx-rtmp-module-1.1.7.tar.gz
tar xzvf nginx-rtmp-module-1.1.7.tar.gz
cd nginx-1.8.0
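# adjust --add-module if nginx-rtmp-module-1.1.7 was extracted somewhere other than /opt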
./configure --prefix=/usr/local/nginx --with-http_ssl_module --add-module=/opt/nginx-rtmp-module-1.1.7
make
make install

Verify Nginx

# run nginx
/usr/local/nginx/sbin/nginx -c /usr/local/nginx/conf/nginx.conf
# web server
wget http://localhost
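
It is also worth confirming that the RTMP module was actually compiled in, and knowing how to stop the server before swapping in the new configuration:

# -V prints the configure arguments; --add-module=...nginx-rtmp-module-1.1.7 should appear
/usr/local/nginx/sbin/nginx -V
# stop nginx before replacing nginx.conf (alternatively, reload it afterwards with -s reload)
/usr/local/nginx/sbin/nginx -s stop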

Replace nginx.conf

#user  nobody;
worker_processes  1;

#pid        logs/nginx.pid;

events {
    worker_connections  1024;
}

rtmp {

    server {

        listen 1935;

        chunk_size 4000;

        application live {
            live on;
        }

        # TV mode: one publisher, many subscribers
        application mytv {

            # enable live streaming
            live on;

            # record first 1K of stream
            record all;
            record_path /tmp/av;
            record_max_size 1K;

            # append current timestamp to each flv
            record_unique on;

            # publish only from localhost
            allow publish 127.0.0.1;
            deny publish all;

            #allow play all;
        }

        # Transcoding (ffmpeg needed)
        application big {
            live on;

            # On every published stream run this command (ffmpeg)
            # with substitutions: $app/${app}, $name/${name} for application & stream name.
            #
            # This ffmpeg call receives stream from this application &
            # reduces the resolution down to 160x120. The stream is then published to
            # 'small' application (see below) under the same name.
            #
            # ffmpeg can do anything with the stream like video/audio
            # transcoding, resizing, altering container/codec params etc
            #
            # Multiple exec lines can be specified.

            exec /opt/ffmpeg/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name -vcodec flv -acodec copy -s 160x120
                        -f flv rtmp://localhost:1935/small/${name};
        }

        application small {
            live on;
            # Video with reduced resolution comes here from ffmpeg
        }

        application webcam {
            live on;

            # Stream from local webcam
            exec_static /opt/ffmpeg/bin/ffmpeg -f video4linux2 -i /dev/video0 -c:v libx264 -an
                               -f flv rtmp://localhost:1935/webcam/mystream;
        }

        application mypush {
            live on;

            # Every stream published here
            # is automatically pushed to
            # these two machines
            ##push rtmp1.example.com;
            ##push rtmp2.example.com:1934;
        }

        application mypull {
            live on;

            # Pull all streams from remote machine
            # and play locally
            ##pull rtmp://rtmp3.example.com pageUrl=www.example.com/index.html;
        }

        application mystaticpull {
            live on;

            # Static pull is started at nginx start
            ##pull rtmp://rtmp4.example.com pageUrl=www.example.com/index.html name=mystream static;
        }

        # video on demand
        application vod {
            play /var/flvs;
        }

        application vod2 {
            play /var/mp4s;
        }

        # Many publishers, many subscribers
        # no checks, no recording
        application videochat {

            live on;

            # The following notifications receive all
            # the session variables as well as
            # particular call arguments in HTTP POST
            # request

            # Make HTTP request & use HTTP retcode
            # to decide whether to allow publishing
            # from this connection or not
            on_publish http://localhost:8080/publish;

            # Same with playing
            on_play http://localhost:8080/play;

            # Publish/play end (repeats on disconnect)
            on_done http://localhost:8080/done;

            # All above mentioned notifications receive
            # standard connect() arguments as well as
            # play/publish ones. If any arguments are sent
            # with GET-style syntax to play & publish
            # these are also included.
            # Example URL:
            #   rtmp://localhost/myapp/mystream?a=b&c=d

            # record 10 video keyframes (no audio) every 2 minutes
            record keyframes;
            record_path /tmp/vc;
            record_max_frames 10;
            record_interval 2m;

            # Async notify about an flv recorded
            on_record_done http://localhost:8080/record_done;

        }


        # HLS

        # For HLS to work please create a directory in tmpfs (/tmp/hls here)
        # for the fragments. The directory contents is served via HTTP (see
        # http{} section in config)
        #
        # Incoming stream must be in H264/AAC. For iPhones use baseline H264
        # profile (see ffmpeg example).
        # This example creates RTMP stream from movie ready for HLS:
        #
        # ffmpeg -loglevel verbose -re -i movie.avi  -vcodec libx264
        #    -vprofile baseline -acodec libmp3lame -ar 44100 -ac 1
        #    -f flv rtmp://localhost:1935/hls/movie
        #
        # If you need to transcode live stream use 'exec' feature.
        #
        application hls {
            live on;
            hls on;
            hls_path /tmp/hls;
            hls_fragment 2s;
            hls_playlist_length 6s;
        }

        # MPEG-DASH is similar to HLS
        application dash {
            live on;
            dash on;
            dash_path /tmp/dash;
        }
    }
}

# HTTP can be used for accessing RTMP stats
http {

    server {

        listen      80;

        # This URL provides RTMP statistics in XML
        location /stat {
            rtmp_stat all;

            # Use this stylesheet to view XML as web page
            # in browser
            rtmp_stat_stylesheet stat.xsl;
        }

        location /stat.xsl {
            # XML stylesheet to view RTMP stats.
            # Copy stat.xsl wherever you want
            # and put the full directory path here
            root /path/to/stat.xsl/;
        }

        location /hls {
            # Serve HLS fragments
            types {
                application/vnd.apple.mpegurl m3u8;
                video/mp2t ts;
            }
            root /tmp;
            add_header Cache-Control no-cache;
        }

        location /dash {
            # Serve DASH fragments
            root /tmp;
            add_header Cache-Control no-cache;
        }
    }
}
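
After saving the new configuration, create the directories it references, test it, and reload (or restart) nginx so the rtmp and http blocks above take effect:

# create the fragment/recording directories used in the configuration
mkdir -p /tmp/hls /tmp/dash /tmp/av /tmp/vc
# check the configuration, then reload the running nginx
/usr/local/nginx/sbin/nginx -t -c /usr/local/nginx/conf/nginx.conf
/usr/local/nginx/sbin/nginx -s reload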

Verify RTMP
Use the standard publishing tool FMLE (Flash Media Live Encoder) to push an RTMP stream to:

rtmp://x.x.x.x:1935/live/stream

Play the RTMP stream with FastPlay:

Server: rtmp://x.x.x.x:1935/live
Stream: stream
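
If FMLE is not at hand, the FFmpeg built earlier can publish a test stream instead; test.mp4 below is just a placeholder for any local H.264/AAC file:

# push a local file to the live application (test.mp4 is a placeholder)
/opt/ffmpeg/bin/ffmpeg -re -i test.mp4 -c:v copy -c:a copy -f flv rtmp://x.x.x.x:1935/live/stream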

Verify HLS
Use the standard publishing tool FMLE to push an RTMP stream to:

rtmp://x.x.x.x:1935/hls/stream

Play the HLS stream with VLC:
Open a network stream with the URL:

http://120.26.109.136/hls/stream.m3u8
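
While the stream is being published, you can also check on the server that the playlist and .ts segments are actually being produced:

# segments and the playlist appear under hls_path while the stream is live
ls -l /tmp/hls
curl http://127.0.0.1/hls/stream.m3u8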

Verify FFmpeg Transcoding
Publish stream: rtmp://x.x.x.x:1935/big/stream
Play stream: rtmp://x.x.x.x:1935/small/stream
Here FFmpeg takes the video stream published to big (use a source resolution of at least 320x240 so the effect is easy to see), scales it down to 160x120, and republishes it to the small application.
Use the standard publishing tool FMLE to push an RTMP stream to:

rtmp://x.x.x.x:1935/big/stream

Play the RTMP stream with FastPlay:

Server: rtmp://x.x.x.x:1935/small
Stream: stream
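
If no desktop player is available, ffprobe (installed alongside FFmpeg above) can confirm that the transcoded stream exists; the video stream it reports should be 160x120:

# inspect the transcoded output of the 'small' application
/opt/ffmpeg/bin/ffprobe rtmp://x.x.x.x:1935/small/stream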

References
使用Nginx+FFMPEG搭建HLS直播转码服务器
nginx+nginx-rtmp-module+ffmpeg搭建流媒体服务器
Nginx 的启动、停止、平滑重启、信号控制和平滑升级
