@Override
public void processData(byte[] data) {
    Log.v(TAG, "processData start");
    int ret = mAvcEncoder.offerEncoder(data, h264);
    Log.v(TAG, "ret == " + ret);
    if (ret > 0) {
        // Send the encoded stream in real time
        byte[] h264Data = new byte[ret];
        System.arraycopy(h264, 0, h264Data, 0, ret);
        // Ceiling division: split the frame into chunks of at most 1480 bytes
        // so each RTP payload fits in a typical Ethernet MTU
        int dataLength = (h264Data.length - 1) / 1480 + 1;
        final byte[][] sendData = new byte[dataLength][];
        final boolean[] marks = new boolean[dataLength];
        // The RTP marker bit is set only on the last packet of the frame
        marks[marks.length - 1] = true;
        int x = 0;
        int y = 0;
        int length = h264Data.length;
        for (int i = 0; i < length; i++) {
            if (y == 0) {
                sendData[x] = new byte[length - i > 1480 ? 1480 : length - i];
            }
            sendData[x][y] = h264Data[i];
            y++;
            if (y == sendData[x].length) {
                y = 0;
                x++;
            }
        }
        mRtpDataSend.rtpSession.sendData(sendData, null, marks, -1, null);
    }
}
Step one is to take the YUV data and encode it into H.264. I developed this on my own device, which supports hardware encoding; adjust this step to your own needs.
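The post doesn't show how mediaCodec itself is created. A minimal initialization sketch, assuming COLOR_FormatYUV420SemiPlanar (NV12) input to match the conversion used below; the bitrate and frame rate values are only illustrative:

// Illustrative encoder setup; m_width/m_height and mediaCodec are the
// fields used by offerEncoder below
mediaCodec = MediaCodec.createEncoderByType("video/avc");
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", m_width, m_height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2000000); // illustrative
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);    // illustrative
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();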
The hardware encoder implementation:
public int offerEncoder(byte[] input, byte[] output) {
    Log.v(TAG, "offerEncoder:" + input.length + "+" + output.length);
    int pos = 0;
    byte[] yuv420sp = new byte[m_width * m_height * 3 / 2];
    // The camera on this device delivers NV21; convert it to NV12 for the encoder
    NV21ToNV12(input, yuv420sp, m_width, m_height);
    input = yuv420sp;
    //swapYV12toI420(input, yuv420sp, m_width, m_height);
    if (null != input) {
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(input);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                if (m_info != null) {
                    System.arraycopy(outData, 0, output, pos, outData.length);
                    pos += outData.length;
                } else {
                    // The SPS/PPS arrive only in the very first output buffer;
                    // save them for later use
                    ByteBuffer spsPpsBuffer = ByteBuffer.wrap(outData);
                    Log.v(TAG, "offerEncoder:spsPpsBuffer:" + spsPpsBuffer);
                    m_info = new byte[outData.length];
                    System.arraycopy(outData, 0, m_info, 0, outData.length);
                }
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
    return pos;
}
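NV21ToNV12 is called above but not shown in the post. A minimal sketch of the conversion, assuming the standard layouts (both formats are YUV 4:2:0 with a full Y plane followed by interleaved chroma; NV21 stores V/U pairs, NV12 stores U/V pairs):

// NV21 -> NV12: copy the Y plane unchanged, swap each interleaved chroma pair
private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
    if (nv21 == null || nv12 == null) return;
    int frameSize = width * height;
    // The Y plane is identical in both formats
    System.arraycopy(nv21, 0, nv12, 0, frameSize);
    // Chroma plane: swap V,U (NV21) to U,V (NV12)
    for (int i = 0; i < frameSize / 2; i += 2) {
        nv12[frameSize + i] = nv21[frameSize + i + 1];   // U
        nv12[frameSize + i + 1] = nv21[frameSize + i];   // V
    }
}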
After encoding, each frame has to be split into packets before sending. The packetization code is borrowed from https://github.com/xcy396/MediaCodecDemo (standing on the shoulders of giants); see that project for the details, as I'm only documenting this feature here. The split is a ceiling division into 1480-byte chunks: a 3000-byte frame, for example, becomes three packets of 1480, 1480, and 40 bytes, with the RTP marker bit set on the last one.
jlibrtp is easy to use: just copy all of its code into the project. The sender side looks like this:
public class RtpDataSend implements RTPAppIntf {
    public RTPSession rtpSession = null;
    private static final String TAG = "RtpDataSend";

    public RtpDataSend() {
        DatagramSocket rtpSocket = null;
        DatagramSocket rtcpSocket = null;
        try {
            rtpSocket = new DatagramSocket(4003);
            rtcpSocket = new DatagramSocket(4004);
        } catch (Exception e) {
            Log.d(TAG, e.toString());
        }
        // Create the session
        rtpSession = new RTPSession(rtpSocket, rtcpSocket);
        rtpSession.RTPSessionRegister(this, null, null);
        // Add the participant (target IP address, RTP port, RTCP port)
        Participant p = new Participant("192.168.43.195", 4003, 4004);
        rtpSession.addParticipant(p);
        rtpSession.payloadType(96); // 96 = first dynamic RTP payload type
    }

    @Override
    public void receiveData(DataFrame frame, Participant participant) {
    }

    @Override
    public void userEvent(int type, Participant[] participant) {
    }

    @Override
    public int frameSize(int payloadType) {
        return 1;
    }
}
Note that the IP address must be the receiving phone's IP address.
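For reference, a minimal sketch of how the sender pieces fit together, reusing the field names from the processData snippet at the top; the AvcEncoder constructor signature is assumed for illustration:

// Hypothetical wiring; field names follow processData above
private AvcEncoder mAvcEncoder;
private RtpDataSend mRtpDataSend;
private byte[] h264 = new byte[800000]; // encoder output buffer

void initStreaming() {
    mAvcEncoder = new AvcEncoder(1280, 720); // assumed constructor (width, height)
    mRtpDataSend = new RtpDataSend();        // binds RTP/RTCP sockets 4003/4004
}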
Receiving, decoding, and playback are also straightforward; here is the code:
public class ClientTextureView extends TextureView implements TextureView.SurfaceTextureListener {
    private static final String MIME_TYPE = "video/avc";
    private static final String TAG = "ClientTextureView";
    private MediaCodec decode;
    byte[] rtpData = new byte[800000];
    byte[] h264Data = new byte[800000];
    int timestamp = 0;
    DatagramSocket socket;

    public ClientTextureView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setSurfaceTextureListener(this);
        // try {
        //     socket = new DatagramSocket(5004); // port number
        //     socket.setReuseAddress(true);
        //     socket.setBroadcast(true);
        // } catch (SocketException e) {
        //     e.printStackTrace();
        // }
    }
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        new PreviewThread(new Surface(surface), 1280, 720); // the phone's resolution
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        if (socket != null) {
            socket.close();
            socket = null;
        }
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
    private class PreviewThread extends Thread {
        DatagramPacket datagramPacket = null;

        @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
        public PreviewThread(Surface surface, int width, int height) {
            Log.e(TAG, "PreviewThread: constructor");
            try {
                decode = MediaCodec.createDecoderByType(MIME_TYPE);
                final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
                format.setInteger(MediaFormat.KEY_BIT_RATE, 40000);
                format.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
                format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
                // SPS/PPS hard-coded from this particular encoder; they must match
                // the sender's actual csd-0/csd-1 or decoding will fail
                byte[] header_sps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
                byte[] header_pps = {0, 0, 0, 1, 104, -54, 67, -56};
                format.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
                format.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
                decode.configure(format, surface, null, 0);
                decode.start();
            } catch (IOException e) {
                e.printStackTrace();
            }
            start();
        }

        @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
        @Override
        public void run() {
            RtpDataReceive rtpDataReceive = new RtpDataReceive();
        }
    }
    // Decode the H.264 data
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    private void offerDecoder(byte[] input, int length) {
        Log.d(TAG, "offerDecoder: ");
        try {
            ByteBuffer[] inputBuffers = decode.getInputBuffers();
            int inputBufferIndex = decode.dequeueInputBuffer(0);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                try {
                    inputBuffer.put(input, 0, length);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                decode.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = decode.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                // If a valid surface was specified when configuring the codec,
                // passing true renders this output buffer to the surface
                decode.releaseOutputBuffer(outputBufferIndex, true);
                outputBufferIndex = decode.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
    class RtpDataReceive implements RTPAppIntf {
        public RTPSession rtpSession = null;

        public RtpDataReceive() {
            DatagramSocket rtpSocket = null;
            DatagramSocket rtcpSocket = null;
            try {
                rtpSocket = new DatagramSocket(4003);
                rtcpSocket = new DatagramSocket(4004);
            } catch (Exception e) {
                System.out.println("Exception while creating the receive session: " + e);
            }
            // Create the session
            rtpSession = new RTPSession(rtpSocket, rtcpSocket);
            rtpSession.naivePktReception(true); // hand packets to the app as they arrive
            rtpSession.RTPSessionRegister(this, null, null);
        }

        private byte[] buf;

        @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
        @Override
        public void receiveData(DataFrame frame, Participant participant) {
            // Accumulate packets until the marker bit signals the end of a frame,
            // then hand the reassembled frame to the decoder
            if (buf == null) {
                buf = frame.getConcatenatedData();
            } else {
                buf = Util.merge(buf, frame.getConcatenatedData());
            }
            if (frame.marked()) {
                offerDecoder(buf, buf.length);
                buf = null;
            }
        }

        @Override
        public void userEvent(int type, Participant[] participant) {
        }

        @Override
        public int frameSize(int payloadType) {
            return 1;
        }
    }
}
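Util.merge is called in receiveData above but not shown; a minimal sketch that simply concatenates two byte arrays:

public class Util {
    // Concatenate two byte arrays into a new one
    public static byte[] merge(byte[] a, byte[] b) {
        byte[] out = new byte[a.length + b.length];
        System.arraycopy(a, 0, out, 0, a.length);
        System.arraycopy(b, 0, out, a.length, b.length);
        return out;
    }
}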