IP Camera Code Analysis


2013-1-14

 

I. Overall Flow

1. Create a MediaRecorder and start recording; stop it after about 2 s.

2. Inspect "/sdcard/ipcam/myvideo.mp4" to find the offset of the mdat box and the SPS/PPS information.

3. Create the camera and HTTP local-socket endpoints and pass them down to the C++ layer.

4. Tell doCapture and doStreaming to start receiving/sending (recv/send) and processing data on the local sockets.

5. Create another MediaRecorder and direct its output to the previously created local socket.

6. doCapture receives NAL units starting right at the beginning of the mdat data and applies its own timestamps; AMR audio is not handled.

 

 

 

II. Code Walkthrough

private void setup()
    btnStart.setOnClickListener(startAction);

 

private OnClickListener startAction = new OnClickListener() {
    @Override
    public void onClick(View v) {
        doAction();
    }
};

 

private void doAction() {
    if (inServer == false) {
        myCamView.PrepareMedia(targetWid, targetHei);   // myMediaRecorder = new MediaRecorder();
        boolean ret = myCamView.StartRecording(checkingFile);                    // (1)
        myCamView.StopMedia();
        if (NativeAgent.NativeCheckMedia(targetWid, targetHei, checkingFile))    // (2)
            startServer();                                                        // (3)
    }
}

The numbered markers (1) through (7) refer to the correspondingly numbered subsections below.

 

1. Record to the local file "/sdcard/ipcam/myvideo.mp4"

public boolean StartRecording(String targetFile) {
    myMediaRecorder.setOutputFile(targetFile);
    return realyStart();
}

 

private boolean realyStart()
    myMediaRecorder.prepare();
    myMediaRecorder.start();

 

2. Learn the mdat offset from the local file and search for the SPS/PPS

static public boolean NativeCheckMedia(int wid, int hei, String filename)
    nativeCheckMedia(wid, hei, filename)

JNIEXPORT jint JNICALL JNIDEFINE(nativeCheckMedia)(JNIEnv* env, jclass clz, jint wid, jint hei, jstring file_path)
    CheckMedia(wid, hei, mp4_file);
        // find the position of the mdat box
        // extract the SPS/PPS
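
CheckMedia itself is not listed in the post. As a rough illustration of what "find the mdat offset and extract SPS/PPS" can look like, here is a minimal sketch that linearly scans the file for the mdat and avcC boxes. The linear scan, the function and struct names, and the single-SPS/single-PPS assumption are all mine, not the project's actual implementation; bounds checks are omitted.

#include <cstdio>
#include <cstring>
#include <vector>

struct MediaCheckResult {
    long mdat_offset;                     // first byte of the mdat payload
    std::vector<unsigned char> sps;
    std::vector<unsigned char> pps;
};

static bool SketchCheckMedia(const char* path, MediaCheckResult* out) {
    out->mdat_offset = -1;

    FILE* fp = fopen(path, "rb");
    if (!fp) return false;
    std::vector<unsigned char> data;
    unsigned char tmp[4096];
    size_t n;
    while ((n = fread(tmp, 1, sizeof(tmp), fp)) > 0)
        data.insert(data.end(), tmp, tmp + n);
    fclose(fp);

    for (size_t i = 0; i + 8 < data.size(); i++) {
        // the payload of the mdat box starts right after its 'mdat' tag
        if (out->mdat_offset < 0 && memcmp(&data[i], "mdat", 4) == 0)
            out->mdat_offset = (long)(i + 4);

        // avcC record: 5 config bytes, numSPS, 2-byte SPS length + SPS,
        // numPPS, 2-byte PPS length + PPS (assuming one of each)
        if (memcmp(&data[i], "avcC", 4) == 0) {
            size_t p = i + 4 + 5;                              // -> numSPS byte
            size_t sps_len = (data[p + 1] << 8) | data[p + 2];
            out->sps.assign(&data[p + 3], &data[p + 3] + sps_len);
            p += 3 + sps_len + 1;                              // skip numPPS byte
            size_t pps_len = (data[p] << 8) | data[p + 1];
            out->pps.assign(&data[p + 2], &data[p + 2] + pps_len);
        }
    }
    return out->mdat_offset >= 0 && !out->sps.empty() && !out->pps.empty();
}

The real CheckMedia presumably also records mediaInfo.begin_skip, the byte count that doCapture later skips before the first NAL unit.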

 

3. Start the camera loop and the HTTP loop

private void startServer()
    strServer = new StreamingServer(8080, resourceDirectory);
    strServer.setOnRequestListen(streamingRequest);

StreamingServer.OnRequestListen streamingRequest = new StreamingServer.OnRequestListen()
    startStreaming()

 

private boolean startStreaming()
    cameraLoop.InitLoop();
    httpLoop.InitLoop();
    nativeAgt.NativeStartStreamingMedia(cameraLoop.getReceiverFileDescriptor(), httpLoop.getSenderFileDescriptor());   // (4)
    myCamView.PrepareMedia(targetWid, targetHei);   // myMediaRecorder = new MediaRecorder();
    boolean ret = myCamView.StartStreaming(cameraLoop.getSenderFileDescriptor());                                      // (5)
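
InitLoop() and the get*FileDescriptor() getters are not listed in the post; presumably each loop holds a connected android.net.LocalSocket pair, so that one end can be handed to the MediaRecorder or the HTTP server and the other end's fd passed down to native code. Purely as a conceptual native-side analogue (not the project's actual Java code), a Unix socketpair produces the same sender/receiver fd pair:

#include <sys/socket.h>
#include <unistd.h>

// Conceptual sketch: a connected fd pair; the producer writes into
// sender_fd and the consumer reads from receiver_fd.
struct FdLoop {
    int sender_fd;
    int receiver_fd;

    FdLoop() : sender_fd(-1), receiver_fd(-1) {}

    bool InitLoop() {
        int fds[2];
        if (socketpair(AF_UNIX, SOCK_STREAM, 0, fds) != 0)
            return false;
        sender_fd = fds[0];
        receiver_fd = fds[1];
        return true;
    }

    ~FdLoop() {
        if (sender_fd >= 0) close(sender_fd);
        if (receiver_fd >= 0) close(receiver_fd);
    }
};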

 

4. Pass the camera-capture socket fd and the httpd send socket fd down to the C layer

static public void NativeStartStreamingMedia(FileDescriptor in, FileDescriptor out)
    nativeStartStreamingMedia(in, out);

 

JNIEXPORT jint JNICALL JNIDEFINE(nativeStartStreamingMedia)(JNIEnv* env, jclass clz, jobject infdesc, jobject outfdesc)
    int infd = getNativeFd(env, clz, infdesc);
    int outfd = getNativeFd(env, clz, outfdesc);
    StartStreamingMedia(infd, outfd);
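
getNativeFd is not shown either. The conventional JNI way to recover the raw integer fd from a java.io.FileDescriptor is to read its private descriptor field; a sketch under that assumption (the field name is an Android implementation detail, not public API):

#include <jni.h>

// Sketch: pull the raw int fd out of a java.io.FileDescriptor object by
// reading its private "descriptor" field; returns -1 on failure.
static int getNativeFd(JNIEnv* env, jclass /*clz*/, jobject fdesc) {
    jclass fdClass = env->FindClass("java/io/FileDescriptor");
    if (fdClass == NULL)
        return -1;
    jfieldID fdField = env->GetFieldID(fdClass, "descriptor", "I");
    if (fdField == NULL)
        return -1;
    return env->GetIntField(fdesc, fdField);
}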

 

int StartStreamingMedia(int infd, int outfd)
    mediaBuffer = new MediaBuffer(32, 120, MAX_VIDEO_PACKAGE, 1024);
    mediaStreamer = new MediaStreamer(infd, outfd);
    mediaStreamer->Start();

 

void MediaStreamer::Start()
    captureThread->Post(this, MSG_BEGIN_CAPTURE_TASK);
    streamingThread->Post(this, MSG_BEGIN_STREAMING_TASK);

 

Start capturing the data coming from the camera and sending out the packaged, video-only FLV data.

void MediaStreamer::OnMessage(talk_base::Message *msg) {
    switch (msg->message_id) {
        case MSG_BEGIN_CAPTURE_TASK:
            doCapture();                // (6)
            break;

        case MSG_BEGIN_STREAMING_TASK:
            doStreaming();              // (7)
            break;

        default:
            break;
    }
}

 

5. Create yet another MediaRecorder and set it up for streamed output

public boolean StartStreaming(FileDescriptor targetFd)
    myMediaRecorder.setOutputFile(targetFd);
    myMediaRecorder.setMaxDuration(9600000);    // max duration 9,600,000 ms (about 2.7 hours)
    // myMediaRecorder.setMaxFileSize(1600000000);  // max file size 1,600,000,000 bytes (about 1.6 GB)
    myMediaRecorder.setOnInfoListener(streamingEventHandler);
    return realyStart();

 

private boolean realyStart()
    myMediaRecorder.prepare();
    myMediaRecorder.start();

 

6. doCapture: receive the camera data and restamp the NAL units

void MediaStreamer::doCapture()
    if (buf[0] == 0x00) {
        // recv an H.264 NAL unit
        // recompute the timestamp of each NAL unit
    } else {            // see 8. below
        // recv AMR data, but it is never PushBuffer'd, i.e. the AMR audio is never handed to the httpd
    }

 

Detailed code:

// this method only supports H.264 + AMR_NB
void MediaStreamer::doCapture() {
    unsigned char *buf;
    buf = new unsigned char[MAX_VIDEO_PACKAGE];
    unsigned int aseq = 0;
    unsigned int vseq = 0;

    const unsigned int STACK_SIZE = 128;
    std::list<int64_t> timestack;
    unsigned long last_ts = 0;

    // skip the unusable header bytes before the mdat payload
    fillBuffer(buf, mediaInfo.begin_skip);

    // fetching real-time video data from the camera
    while (1) {
        if (fillBuffer(buf, 4) < 0)
            break;

checking_buffer:
        if (buf[0] == 0x00) {
            // 4-byte big-endian length prefix; the high byte is 0 for any NAL unit
            // below 16 MB, which is how video is distinguished from AMR audio here
            unsigned int vpkg_len = (buf[1] << 16) + (buf[2] << 8) + buf[3];
            if (fillBuffer(&buf[4], vpkg_len) < 0)
                break;
            vpkg_len += 4;

            if (vpkg_len > (unsigned int)MAX_VIDEO_PACKAGE) {
                LOGD("ERROR: Drop big video frame....");
                vseq++;
                fillBuffer(vpkg_len);
                continue;
            }

            // crude keyframe detection: buf[4] is the NAL header, buf[5] the first byte of the slice data
            int slice_type = 0;
            if ((buf[5] & 0xF8) == 0xB8) {
                slice_type = 1;
            } else if (((buf[5] & 0xFF) == 0x88)
                    && ((buf[5] & 0x80) == 0x80)) {
                slice_type = 1;
            } else if ((buf[5] & 0xE0) == 0xE0) {
                slice_type = 0;
            } else if ((buf[5] & 0xFE) == 0x9A) {
                slice_type = 0;
            }
            // replace the length prefix with an Annex-B start code
            buf[0] = 0x00;
            buf[1] = 0x00;
            buf[2] = 0x00;
            buf[3] = 0x01;

#if 1
            // computing the current package's timestamp
            int64_t cts = getCurrentTime() / 1000;

            if (timestack.size() >= STACK_SIZE) {
                timestack.pop_back();
                timestack.push_front(cts);
            } else {
                timestack.push_front(cts);
            }

            if (timestack.size() < STACK_SIZE) {
                cts = (timestack.size() - 1) * 100;      // default = 10 fps
                last_ts = (unsigned long)cts;
            } else {
                // once the window is full, advance by the average frame interval
                // observed over the last STACK_SIZE frames
                unsigned long total_ms;
                total_ms = timestack.front() - timestack.back();
                cts = last_ts + total_ms / (STACK_SIZE - 1);
                last_ts = cts;
            }
            mediaBuffer->PushBuffer(buf, vpkg_len, last_ts, slice_type ? MEDIA_TYPE_VIDEO_KEYFRAME : MEDIA_TYPE_VIDEO);
#else
            vseq++;
            mediaBuffer->PushBuffer(buf, vpkg_len, vseq * 1000 / mediaInfo.video_frame_rate, slice_type ? MEDIA_TYPE_VIDEO_KEYFRAME : MEDIA_TYPE_VIDEO);
#endif
        } else {
            // fetching an AMR_NB audio package
            static const unsigned char packed_size[16] = {12, 13, 15, 17, 19, 20, 26, 31, 5, 0, 0, 0, 0, 0, 0, 0};
            unsigned int mode = (buf[0] >> 3) & 0x0F;
            unsigned int size = packed_size[mode] + 1;
            if (size > 4) {
                if (fillBuffer(&buf[4], size - 4) < 0)
                    break;
                aseq++;
                //SignalNewPackage(buf, 32, ats, MEDIA_TYPE_AUDIO);
            } else {
                // frame is at most 4 bytes; read `size` more so that exactly 4 bytes
                // of the next packet remain, shift them to the front and re-check
                fillBuffer(&buf[4], size);
                for (int i = 0; i < 4; i++)
                    buf[i] = buf[size + i];
                //SignalNewPackage(buf, 32, ats, MEDIA_TYPE_AUDIO);
                goto checking_buffer;
            }
        }
    }
    delete [] buf;      // allocated with new[], so use delete[]
}
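
fillBuffer, used throughout the loop above, is never listed in the post. Judging from its call sites, it blocks until exactly the requested number of bytes has arrived from the capture socket and returns a negative value when the socket closes; the single-argument form presumably just discards bytes. A free-function sketch under those assumptions (the real one would be a MediaStreamer member reading from the infd passed to the constructor):

#include <sys/socket.h>
#include <sys/types.h>

// Sketch only: read exactly `len` bytes from socket `fd` into `buf`.
// Returns len on success, -1 if the peer closes the socket or an error occurs.
static int fillBufferSketch(int fd, unsigned char *buf, unsigned int len) {
    unsigned int got = 0;
    while (got < len) {
        ssize_t n = recv(fd, buf + got, len - got, 0);
        if (n <= 0)
            return -1;
        got += (unsigned int)n;
    }
    return (int)got;
}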

 

7. doStreaming mainly packages the data into FLV and sends it out; it is not examined in detail here.
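
The FLV packaging itself is outside the scope of this post, but for orientation, wrapping one H.264 NAL unit into an FLV video tag looks roughly like the sketch below. The function name and the use of std::vector are mine; CompositionTime is fixed at 0, and the AVC sequence-header tag built from the cached SPS/PPS is not shown.

#include <cstdint>
#include <vector>

// Sketch only: wrap one H.264 NAL unit (without start code) into an FLV video tag,
// the way doStreaming presumably does before sending it over the httpd socket.
static void appendFlvVideoTag(std::vector<uint8_t>& out,
                              const uint8_t* nal, uint32_t nal_len,
                              uint32_t timestamp_ms, bool keyframe) {
    uint32_t data_size = 5 + 4 + nal_len;          // VideoData header + NALU length + NALU

    out.push_back(9);                              // TagType: 9 = video
    out.push_back((data_size >> 16) & 0xFF);       // DataSize, 24-bit big-endian
    out.push_back((data_size >> 8) & 0xFF);
    out.push_back(data_size & 0xFF);
    out.push_back((timestamp_ms >> 16) & 0xFF);    // Timestamp, low 24 bits
    out.push_back((timestamp_ms >> 8) & 0xFF);
    out.push_back(timestamp_ms & 0xFF);
    out.push_back((timestamp_ms >> 24) & 0xFF);    // TimestampExtended
    out.push_back(0); out.push_back(0); out.push_back(0);   // StreamID = 0

    out.push_back((keyframe ? 0x10 : 0x20) | 0x07);// FrameType | CodecID (7 = AVC)
    out.push_back(1);                              // AVCPacketType: 1 = NALU
    out.push_back(0); out.push_back(0); out.push_back(0);   // CompositionTime = 0
    out.push_back((nal_len >> 24) & 0xFF);         // 4-byte NALU length prefix
    out.push_back((nal_len >> 16) & 0xFF);
    out.push_back((nal_len >> 8) & 0xFF);
    out.push_back(nal_len & 0xFF);
    out.insert(out.end(), nal, nal + nal_len);

    uint32_t prev = 11 + data_size;                // PreviousTagSize
    out.push_back((prev >> 24) & 0xFF);
    out.push_back((prev >> 16) & 0xFF);
    out.push_back((prev >> 8) & 0xFF);
    out.push_back(prev & 0xFF);
}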

 

 

8. The table below shows that the header byte of an AMR frame can never be 0x00.

Mode         Frame size (bytes)   Frame header (1 byte)
AMR 4.75     13                   04 (0000 0100)
AMR 5.15     14                   0C (0000 1100)
AMR 5.9      16                   14 (0001 0100)
AMR 6.7      18                   1C (0001 1100)
AMR 7.4      20                   24 (0010 0100)
AMR 7.95     21                   2C (0010 1100)
AMR 10.12    27                   34 (0011 0100)
AMR 12.2     32                   3C (0011 1100)
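
To cross-check the table against the packed_size lookup in doCapture: bits 6..3 of the header byte select the mode, and packed_size[mode] + 1 reproduces the frame sizes above. A small sketch:

#include <cstdio>

// Sketch: decode the 1-byte AMR-NB frame header the same way doCapture does.
// Bits 6..3 carry the frame type (mode); packed_size gives the payload size
// that follows the header byte.
static const unsigned char packed_size[16] =
    {12, 13, 15, 17, 19, 20, 26, 31, 5, 0, 0, 0, 0, 0, 0, 0};

int main() {
    // header bytes from the table above
    const unsigned char headers[8] = {0x04, 0x0C, 0x14, 0x1C, 0x24, 0x2C, 0x34, 0x3C};
    for (int i = 0; i < 8; i++) {
        unsigned int mode = (headers[i] >> 3) & 0x0F;           // frame type
        unsigned int frame_size = packed_size[mode] + 1;        // +1 for the header byte
        printf("header 0x%02X -> mode %u, frame size %u bytes\n",
               headers[i], mode, frame_size);
    }
    return 0;
}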

 

III. Experimental Data

 
