Android MediaCodec编解码与RTP传输

一、目的

本文围绕Android的MediaCodec编解码展开:将摄像头采集的视频数据编码成H264数据,然后封装成RTP协议,利用UDP进行传输;接收端接收到RTP数据后解包得到H264数据,再交给MediaCodec进行解码显示,结构图如下:


二、MediaCodec编码

import java.nio.ByteBuffer;

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

/**
 * H.264 (AVC) hardware encoder wrapper around {@link MediaCodec}.
 *
 * <p>Camera preview frames arrive in YV12 layout; {@link #offerEncoder} converts them
 * to I420, feeds the encoder, and writes the resulting Annex-B NAL units into the
 * caller-supplied output buffer. SPS/PPS from the first output packet are cached and
 * prepended to every key frame so each key frame is independently decodable.
 */
public class AvcEncoder {

    private static final String TAG = "xmc";
    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding

    private MediaCodec mediaCodec;
    int m_width;
    int m_height;
    // Cached SPS/PPS configuration data (first encoder output packet); null until seen.
    byte[] m_info = null;

    private int mColorFormat;
    private MediaCodecInfo codecInfo;
    // Scratch buffer holding the YV12->I420 converted frame.
    private byte[] yuv420 = null;

    /**
     * Creates and starts an AVC encoder.
     *
     * @param width     frame width in pixels
     * @param height    frame height in pixels
     * @param framerate frames per second
     * @param bitrate   target bitrate in bits per second
     */
    @SuppressLint("NewApi")
    public AvcEncoder(int width, int height, int framerate, int bitrate) {
        m_width = width;
        m_height = height;
        Log.v(TAG, "AvcEncoder:" + m_width + "+" + m_height);
        yuv420 = new byte[width * height * 3 / 2];

        try {
            mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
            MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
            // BUG FIX: COLOR_FormatSurface is only valid when the input comes from a Surface.
            // We queue raw I420 bytes through ByteBuffers, so declare planar YUV420 input.
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // key-frame interval, seconds

            mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
        } catch (Exception e) {
            // createEncoderByType may throw IOException (API 21+); configure may throw
            // IllegalStateException. Log and leave mediaCodec unusable.
            e.printStackTrace();
        }
    }

    /** Stops and releases the underlying codec. Safe to call once after use. */
    @SuppressLint("NewApi")
    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Encodes one YV12 frame.
     *
     * @param input  raw YV12 camera frame (width*height*3/2 bytes)
     * @param output destination for the Annex-B encoded frame; key frames are
     *               prefixed with the cached SPS/PPS
     * @return number of bytes written to {@code output}; 0 if the encoder produced
     *         nothing yet; -1 if the first packet was not a recognizable config packet
     */
    @SuppressLint("NewApi")
    public int offerEncoder(byte[] input, byte[] output) {
        Log.v(TAG, "offerEncoder:" + input.length + "+" + output.length);
        int pos = 0;
        swapYV12toI420(input, yuv420, m_width, m_height);
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                // BUG FIX: queue the converted I420 frame; the original queued the raw
                // YV12 input, discarding the conversion done above (swapped U/V planes).
                inputBuffer.put(yuv420);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, yuv420.length, 0, 0);
            }

            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);

                if (m_info == null) {
                    // First output packet carries SPS/PPS only; cache it for later key frames.
                    ByteBuffer spsPpsBuffer = ByteBuffer.wrap(outData);
                    if (spsPpsBuffer.getInt() == 0x00000001) {
                        m_info = new byte[outData.length];
                        System.arraycopy(outData, 0, m_info, 0, outData.length);
                    } else {
                        // BUG FIX: release the buffer before bailing out (was leaked).
                        mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                        return -1;
                    }
                } else {
                    // BUG FIX: test the freshly encoded packet (outData[4]), not output[4],
                    // which is stale data from a previous call. An IDR slice (NAL type 5,
                    // 0x65 with the start code) needs SPS/PPS prepended because the encoder
                    // emits them only once.
                    if (outData.length > 4 && outData[4] == 0x65) {
                        System.arraycopy(m_info, 0, output, pos, m_info.length);
                        pos += m_info.length;
                    }
                    System.arraycopy(outData, 0, output, pos, outData.length);
                    pos += outData.length;
                }
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
        Log.v(TAG, "offerEncoder+pos:" + pos);
        return pos;
    }

    /**
     * NV21 -> I420 conversion (alternative path; some devices deliver NV21).
     * Y plane is copied verbatim; interleaved VU pairs are de-interleaved into
     * separate U and V planes.
     */
    private void NV21toI420SemiPlanar(byte[] nv21bytes, byte[] i420bytes, int width, int height) {
        Log.v(TAG, "NV21toI420SemiPlanar:::" + width + "+" + height);
        final int iSize = width * height;
        System.arraycopy(nv21bytes, 0, i420bytes, 0, iSize);

        for (int iIndex = 0; iIndex < iSize / 2; iIndex += 2) {
            i420bytes[iSize + iIndex / 2 + iSize / 4] = nv21bytes[iSize + iIndex];     // U
            i420bytes[iSize + iIndex / 2] = nv21bytes[iSize + iIndex + 1];             // V
        }
    }

    /**
     * YV12 -> I420: both are planar YUV420; YV12 stores V before U, I420 stores
     * U before V, so only the two chroma planes are swapped.
     */
    private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height) {
        Log.v(TAG, "swapYV12toI420:::" + width + "+" + height);
        Log.v(TAG, "swapYV12toI420:::" + yv12bytes.length + "+" + i420bytes.length + "+" + width * height);
        int ySize = width * height;
        int cSize = ySize / 4;
        System.arraycopy(yv12bytes, 0, i420bytes, 0, ySize);                 // Y
        System.arraycopy(yv12bytes, ySize + cSize, i420bytes, ySize, cSize); // U (after V in YV12)
        System.arraycopy(yv12bytes, ySize, i420bytes, ySize + cSize, cSize); // V
    }
}

三、封装成RTP

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Packs payload data into RTP-like packets and hands them to an {@link RtpSocket}.
 *
 * <p>Wire format (matching the receiver in ClientTextureView): 12-byte RTP header
 * (V=2, PT, sequence, timestamp, SSRC), then a non-standard 4-byte big-endian
 * payload length at bytes 12..15, then optional prefix data, then the payload.
 */
public class RtpStream {

    private static final String TAG = "RtpStream";

    // Fixed RTP header: V/P/X/CC, M/PT, sequence, timestamp, SSRC.
    private static final int RTP_HEADER_SIZE = 12;

    private int payloadType;
    private int sampleRate;
    private RtpSocket socket;
    private short sequenceNumber;
    private long timeold;

    public RtpStream(int pt, int sampleRate, RtpSocket socket) {
        this.payloadType = pt;
        this.sampleRate = sampleRate;
        this.socket = socket;
    }

    /** Convenience overload without prefix data. */
    public void addPacket(byte[] data, int offset, int size, long timeUs) throws IOException {
        addPacket(null, data, offset, size, timeUs);
    }

    /**
     * Builds one packet and sends it.
     *
     * @param prefixData optional bytes inserted before the payload (may be null)
     * @param data       payload source buffer
     * @param offset     payload start within {@code data}
     * @param size       payload length in bytes
     * @param timeUs     timestamp, truncated to the 32-bit RTP timestamp field
     */
    public void addPacket(byte[] prefixData, byte[] data, int offset, int size, long timeUs) throws IOException {

        /*
        RTP packet header
        Bit offset[b]	0-1	2	3	4-7	8	9-15	16-31
        0			Version	P	X	CC	M	PT	Sequence Number  31
        32			Timestamp									 63
        64			SSRC identifier								 95
        */

        int prefixLength = (prefixData != null) ? prefixData.length : 0;
        // BUG FIX: size the buffer exactly. The previous fixed allocate(500000) both
        // allocated ~500KB per packet and threw BufferOverflowException for any frame
        // larger than 500000 bytes (the encoder's output buffer is width*height*3).
        ByteBuffer buffer = ByteBuffer.allocate(RTP_HEADER_SIZE + 4 + prefixLength + size);

        buffer.put((byte) (2 << 6));      // V=2, P=0, X=0, CC=0
        buffer.put((byte) (payloadType)); // M=0, PT
        buffer.putShort(sequenceNumber++);
        buffer.putInt((int) (timeUs));    // 32-bit timestamp
        buffer.putInt(12345678);          // SSRC (hard-coded for this demo)

        // Non-standard: payload length, read back by the receiver at bytes 12..15.
        buffer.putInt(size);

        if (prefixData != null)
            buffer.put(prefixData);

        buffer.put(data, offset, size);

        sendPacket(buffer, buffer.position());
    }

    /** Ships the assembled packet over the underlying socket. */
    protected void sendPacket(ByteBuffer buffer, int size) throws IOException {
        socket.sendPacket(buffer.array(), 0, size);
        buffer.clear();
    }
}

四、UDP发送

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.util.Timer;
import java.util.TimerTask;

/**
 * {@link RtpSocket} implementation that sends packets as UDP datagrams to a
 * fixed address/port.
 */
public class RtpUdp implements RtpSocket {

    private DatagramSocket mSocket;

    private InetAddress mInetAddress;
    private int mPort;

    /**
     * @param ip        destination host name or dotted address
     * @param port      destination UDP port
     * @param broadcast whether to enable SO_BROADCAST on the socket
     */
    public RtpUdp(String ip, int port, boolean broadcast) {
        try {
            mInetAddress = InetAddress.getByName(ip);
            mPort = port;

            mSocket = new DatagramSocket();
            mSocket.setBroadcast(broadcast);

        } catch (Exception e) {
            // mSocket stays null; close()/sendPacket() are null-guarded below.
            e.printStackTrace();
        }
    }

    /** Closes the socket. Safe to call even if construction failed. */
    public void close() {
        // BUG FIX: a failed constructor left mSocket null, so close() NPE'd.
        if (mSocket != null) {
            mSocket.close();
        }
    }

    @Override
    public void sendPacket(final byte[] data, final int offset, final int size) {
        // BUG FIX: guard against a socket that never opened.
        if (mSocket == null) {
            return;
        }
        try {
            DatagramPacket p = new DatagramPacket(data, offset, size, mInetAddress, mPort);
            mSocket.send(p);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

五、发送

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.List;


import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.os.StrictMode;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;


import com.android.screenrecorder.rtp.RtpSenderWrapper;
import com.encode.androidencode.AvcEncoder;


/**
 * Sender side: opens the camera, previews into a SurfaceView, encodes each YV12
 * preview frame to H.264 via AvcEncoder, and streams the result over RTP/UDP.
 * NOTE(review): encoding happens on the camera callback thread via onPreviewFrame;
 * the StrictMode network penalty is only logged, not fatal, for network I/O here.
 */
public class MainActivity extends Activity implements SurfaceHolder.Callback, PreviewCallback {


	DatagramSocket socket;
	InetAddress address;
	
	AvcEncoder avcCodec;
    public Camera m_camera;  
    SurfaceView   m_prevewview;
    SurfaceHolder m_surfaceHolder;
    // Screen resolution; differs per device — connect adb and run `wm size` to query it.
    int width = 800;
    int height = 480;
    int framerate = 30;// frames per second
    int bitrate = 2500000;// encoder bitrate in bits/s
    private RtpSenderWrapper mRtpSenderWrapper;
    
    // Output buffer for one encoded frame (sized generously at width*height*3 bytes).
    byte[] h264 = new byte[width*height*3];


	/** Sets StrictMode policies, creates the RTP sender + encoder, and wires up the preview surface. */
	@SuppressLint("NewApi")
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		Log.v("xmc", "MainActivity__onCreate");
		StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder()
        .detectDiskReads()
        .detectDiskWrites()
        .detectAll()   // detect all detectable thread-policy problems
        .penaltyLog()
        .build());
StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder()
        .detectLeakedSqlLiteObjects()
        .detectLeakedClosableObjects()
        .penaltyLog()
        .penaltyDeath()
        .build());
		
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_main);
		// Create the RTP sender with the receiver's address; a live system would obtain
		// this dynamically from the requesting client.
		mRtpSenderWrapper = new RtpSenderWrapper("192.168.253.15", 5004, false);
		avcCodec = new AvcEncoder(width,height,framerate,bitrate);
		
		m_prevewview = (SurfaceView) findViewById(R.id.SurfaceViewPlay);
		m_surfaceHolder = m_prevewview.getHolder(); // bind the SurfaceView and grab its SurfaceHolder
		m_surfaceHolder.setFixedSize(width, height); // preview size
		m_surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
		m_surfaceHolder.addCallback((Callback) this);	
		
	}


	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		getMenuInflater().inflate(R.menu.main, menu);
		return true;
	}
	@Override
	public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
	
	}


	/** Opens the camera once the surface exists and starts the YV12 preview feed. */
	@SuppressLint("NewApi")
	@SuppressWarnings("deprecation")
	@Override
	public void surfaceCreated(SurfaceHolder arg0) {
		Log.v("xmc", "MainActivity+surfaceCreated");
		try {
			m_camera = Camera.open();
			m_camera.setPreviewDisplay(m_surfaceHolder);
			Camera.Parameters parameters = m_camera.getParameters();
			parameters.setPreviewSize(width, height);
			parameters.setPictureSize(width, height);
			parameters.setPreviewFormat(ImageFormat.YV12);
			m_camera.setParameters(parameters);	
			m_camera.setPreviewCallback((PreviewCallback) this);
			m_camera.startPreview();
		} catch (IOException e){
			e.printStackTrace();
		}	
	}


	/** Tears down the camera and encoder when the surface goes away. */
	@Override
	public void surfaceDestroyed(SurfaceHolder arg0) {
		Log.v("xmc", "MainActivity+surfaceDestroyed");
		m_camera.setPreviewCallback(null);  // must run BEFORE release(), otherwise teardown crashes
		m_camera.release();
		m_camera = null; 
		avcCodec.close();
	}


	/** Camera callback: encode the YV12 frame; if the encoder produced bytes, send them. */
	@Override
	public void onPreviewFrame(byte[] data, Camera camera) {
		Log.v("xmc", "MainActivity+h264 start");
		int ret = avcCodec.offerEncoder(data, h264);
		if(ret > 0){
			// stream the encoded frame in real time
		    mRtpSenderWrapper.sendAvcPacket(h264, 0, ret, 0);
		}
		Log.v("xmc", "MainActivity+h264 end");
		Log.v("xmc", "-----------------------------------------------------------------------");
	}
}
六、对端解码显示

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.TextureView;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import java.nio.ByteBuffer;

/**
 * Receiver side: listens on UDP port 5004 for the sender's RTP-like packets,
 * extracts the H.264 payload (length is carried in bytes 12..15 of each packet),
 * and decodes it onto this TextureView's surface with {@link MediaCodec}.
 */
public class ClientTextureView extends TextureView implements TextureView.SurfaceTextureListener {

    private static final String MIME_TYPE = "video/avc";
    private static final String TAG = "ClientTextureView";
    private MediaCodec decode;

    byte[] rtpData = new byte[80000];
    byte[] h264Data = new byte[80000];

    int timestamp = 0;

    DatagramSocket socket;

    public ClientTextureView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setSurfaceTextureListener(this);
        try {
            socket = new DatagramSocket(5004); // receive port
            // BUG FIX: this call had been swallowed into a garbled comment on the
            // same line as the constructor and was therefore never executed.
            socket.setReuseAddress(true);
            socket.setBroadcast(true);
        } catch (SocketException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        new PreviewThread(new Surface(surface), 800, 480); // sender's resolution
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        if (socket != null) {
            socket.close();
            socket = null;
        }
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {

    }

    /** Receive loop: reads datagrams, strips the 16-byte header, feeds the decoder. */
    private class PreviewThread extends Thread {
        DatagramPacket datagramPacket = null;

        public PreviewThread(Surface surface, int width, int height) {
            Log.e(TAG, "PreviewThread: gou zhao");
            try {
                decode = MediaCodec.createDecoderByType(MIME_TYPE);

                final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
                format.setInteger(MediaFormat.KEY_BIT_RATE, 40000);
                format.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
                format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

                // Hard-coded SPS/PPS used to prime the decoder before the stream's own
                // config arrives. NOTE(review): these must match the sender's encoder
                // settings (resolution/profile); regenerate them if the sender changes.
                byte[] header_sps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
                byte[] header_pps = {0, 0, 0, 1, 104, -54, 67, -56};

                format.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
                format.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));

                decode.configure(format, surface, null, 0);
                decode.start();
                start();
            } catch (Exception e) {
                // createDecoderByType may throw IOException (API 21+); if setup fails
                // the thread is never started.
                e.printStackTrace();
            }
        }

        @Override
        public void run() {
            byte[] data = new byte[80000];
            int h264Length = 0;
            while (true) {
                if (socket == null) {
                    break; // socket failed to open, or the view was destroyed
                }
                try {
                    datagramPacket = new DatagramPacket(data, data.length);
                    socket.receive(datagramPacket); // blocking receive
                } catch (IOException e) {
                    // BUG FIX: on a failed receive the old code fell through and
                    // re-processed the stale packet; skip to the next iteration.
                    e.printStackTrace();
                    continue;
                }
                rtpData = datagramPacket.getData();
                if (rtpData == null) {
                    continue;
                }
                // Header match: 0x80 = RTP V=2, and PT 96 as written by the sender.
                if (rtpData[0] == -128 && rtpData[1] == 96) {
                    // Big-endian payload length at bytes 12..15 (sender's putInt(size)).
                    int l1 = (rtpData[12] << 24) & 0xff000000;
                    int l2 = (rtpData[13] << 16) & 0x00ff0000;
                    int l3 = (rtpData[14] << 8) & 0x0000ff00;
                    int l4 = rtpData[15] & 0x000000FF;
                    h264Length = l1 + l2 + l3 + l4;
                    Log.e(TAG, "run: h264Length=" + h264Length);
                    // BUG FIX: validate the advertised length before copying; a corrupt
                    // packet previously caused ArrayIndexOutOfBoundsException.
                    if (h264Length <= 0 || h264Length > rtpData.length - 16
                            || h264Length > h264Data.length) {
                        continue;
                    }
                    System.arraycopy(rtpData, 16, h264Data, 0, h264Length);
                    // BUG FIX: feed only the actual payload length; the old code passed
                    // h264Data.length (the full 80000-byte scratch buffer), queuing
                    // trailing garbage after every NAL unit.
                    offerDecoder(h264Data, h264Length);
                }
            }
        }
    }

    /**
     * Feeds one H.264 access unit to the decoder and renders any ready output
     * buffers straight to the configured surface.
     */
    private void offerDecoder(byte[] input, int length) {
        Log.d(TAG, "offerDecoder: ");
        try {
            ByteBuffer[] inputBuffers = decode.getInputBuffers();
            int inputBufferIndex = decode.dequeueInputBuffer(0);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                try {
                    inputBuffer.put(input, 0, length);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                decode.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

            int outputBufferIndex = decode.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                // Passing true renders this output buffer to the configured surface.
                decode.releaseOutputBuffer(outputBufferIndex, true);
                outputBufferIndex = decode.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
}




猜你喜欢

转载自blog.csdn.net/chenhande1990chenhan/article/details/78774075