Camear_MainActivity.java — the main activity class (note: the "Camear" typo is part of the actual class name)
import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
import local.asuper.localplayer.R;
/**
 * Opens the camera, captures YUV preview data, encodes it to H.264/AVC,
 * and writes a test1.h264 file to the sdcard root.
 */
public class Camear_MainActivity extends Activity {
    private static final String TAG = "hxk_Camera_MainActivity";

    private SurfaceView surfaceview;
    private SurfaceHolder surfaceHolder;
    private Camera camera;
    private Parameters parameters;

    // Width and height should both be divisible by 16 (16-pixel alignment);
    // otherwise some hardware encoders intermittently produce corrupted frames.
    int width = 1280;
    int height = 720;
    int framerate = 25;
    int biterate = 8500 * 1000;

    // Bounded hand-off queue of raw NV21 preview frames consumed by the
    // AvcEncoder background thread.
    private static final int yuvqueuesize = 10;
    public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(yuvqueuesize);

    private AvcEncoder avcCodec;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camear__main);
        surfaceview = (SurfaceView) findViewById(R.id.surfaceview);
        surfaceHolder = surfaceview.getHolder();
        surfaceHolder.addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder holder) {
                Log.d(TAG, "surfaceCreated!\n");
                camera = getBackCamera();
                startcamera(camera);
                if (SupportAvcCodec()) {
                    avcCodec = new AvcEncoder(width, height, framerate, biterate);
                    avcCodec.StartEncoderThread();
                } else {
                    Log.e(TAG, "系统不支持AVC!\n");
                }
            }

            @Override
            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                Log.d(TAG, "surfaceChanged!\n");
            }

            @Override
            public void surfaceDestroyed(SurfaceHolder holder) {
                Log.d(TAG, "surfaceDestroyed!\n");
                if (null != camera) {
                    camera.setPreviewCallback(null);
                    camera.stopPreview();
                    camera.release();
                    camera = null;
                }
                // Fix: stop the encoder only if it was actually created
                // (surfaceCreated may have bailed out before constructing it),
                // and stop it independently of the camera's state.
                if (avcCodec != null) {
                    avcCodec.StopThread();
                    avcCodec = null;
                }
            }
        });
    }

    /**
     * Queues one raw preview frame for the encoder, dropping the oldest
     * frame when the queue is full so the camera callback never blocks.
     *
     * @param buffer NV21 frame bytes delivered by onPreviewFrame
     * @param length number of valid bytes in {@code buffer} (currently
     *               unused — the whole array is queued)
     */
    public void putYUVData(byte[] buffer, int length) {
        // Fix: compare against the configured capacity instead of a
        // hard-coded 10, and use offer() so a race with the consumer
        // thread can never throw IllegalStateException the way add() can.
        if (YUVQueue.size() >= yuvqueuesize) {
            YUVQueue.poll();
        }
        YUVQueue.offer(buffer);
    }

    /**
     * Checks whether the device exposes an H.264/AVC codec.
     *
     * @return true if any installed codec advertises the "video/avc" MIME type
     */
    private boolean SupportAvcCodec() {
        if (Build.VERSION.SDK_INT >= 18) {
            for (int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--) {
                MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
                String[] types = codecInfo.getSupportedTypes();
                for (int i = 0; i < types.length; i++) {
                    if (types[i].equalsIgnoreCase("video/avc")) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Configures NV21 preview at the requested size, installs the frame
     * callback that feeds {@link #putYUVData}, and starts the preview.
     *
     * @param mCamera an open camera instance, or null (then this is a no-op)
     */
    private void startcamera(Camera mCamera) {
        if (mCamera != null) {
            try {
                // Every preview frame goes straight into the encoder queue.
                mCamera.setPreviewCallback(new PreviewCallback() {
                    @Override
                    public void onPreviewFrame(byte[] data, Camera camera) {
                        putYUVData(data, data.length);
                    }
                });
                mCamera.setDisplayOrientation(90);
                // Fix: the original fetched the parameters twice — a null
                // check immediately overwritten by an unconditional
                // getParameters(). Fetch them once.
                if (parameters == null) {
                    parameters = mCamera.getParameters();
                }
                parameters.setPreviewFormat(ImageFormat.NV21);
                parameters.setPreviewSize(width, height);
                mCamera.setParameters(parameters);
                mCamera.setPreviewDisplay(surfaceHolder);
                mCamera.startPreview();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Opens camera 0 (by convention the back-facing camera on most devices —
     * NOTE(review): confirm with Camera.CameraInfo if front/back matters).
     *
     * @return an open Camera, or null if the camera is unavailable
     */
    private Camera getBackCamera() {
        Camera c = null;
        try {
            c = Camera.open(0); // attempt to get a Camera instance
        } catch (Exception e) {
            e.printStackTrace();
        }
        return c; // returns null if camera is unavailable
    }
}
本文福利, 免费领取C++音视频学习资料包、技术视频,内容包括(音视频开发,面试题,FFmpeg ,webRTC ,rtmp ,hls ,rtsp ,ffplay ,srs)↓↓↓↓↓↓见下面↓↓文章底部点击免费领取↓↓
Layout (res/layout/activity_camear__main.xml):
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen camera preview surface used by Camear_MainActivity. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<SurfaceView
android:id="@+id/surfaceview"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</RelativeLayout>
AvcEncoder.java
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.Log;
/**
 * Encodes raw YUV camera frames to H.264/AVC with MediaCodec and appends
 * the elementary stream to a file.
 */
public class AvcEncoder {
    private final static String TAG = "MeidaCodec";

    // Timeout (microseconds) for dequeueing codec input/output buffers.
    private int TIMEOUT_USEC = 12000;

    private MediaCodec mediaCodec;
    int m_width;
    int m_height;
    int m_framerate;

    // Cached SPS/PPS bytes (the BUFFER_FLAG_CODEC_CONFIG output), prepended
    // to every key frame so the raw .h264 stream is decodable from any IDR.
    public byte[] configbyte;

    /**
     * Creates and starts an H.264 encoder and opens the output file.
     *
     * @param width     frame width in pixels (should be 16-aligned)
     * @param height    frame height in pixels (should be 16-aligned)
     * @param framerate input frame rate in fps
     * @param bitrate   target bit rate in bits per second
     */
    public AvcEncoder(int width, int height, int framerate, int bitrate) {
        m_width = width;
        m_height = height;
        m_framerate = framerate;
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        // Fix: the input we feed the codec is NV12 (semi-planar, produced by
        // NV21ToNV12), so declare SemiPlanar — the original said Planar.
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        // Fix: the original ignored the bitrate and framerate constructor
        // arguments (hard-coding width*height*5 and 25) — honor them.
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
        // One key frame per second.
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (IOException e) {
            e.printStackTrace();
        }
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
        createfile();
    }

    private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
    private BufferedOutputStream outputStream;
    FileOutputStream outStream;

    /** Deletes any previous output and opens /sdcard/test1.h264 for writing. */
    private void createfile() {
        File file = new File(path);
        if (file.exists()) {
            file.delete();
        }
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(file));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Stops and releases the codec, swallowing shutdown races. */
    private void StopEncoder() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Volatile-like cross-thread stop flag written by the UI thread and read
    // by the encoder thread. NOTE(review): consider declaring it volatile.
    public boolean isRuning = false;

    /** Signals the encoder thread to exit, releases the codec, closes the file. */
    public void StopThread() {
        isRuning = false;
        try {
            StopEncoder();
            outputStream.flush();
            outputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Spawns the background thread that drains Camear_MainActivity.YUVQueue,
     * converts NV21 to NV12, encodes, and writes the H.264 stream to disk.
     */
    public void StartEncoderThread() {
        Thread EncoderThread = new Thread(new Runnable() {
            @Override
            public void run() {
                isRuning = true;
                byte[] input = null;
                long pts = 0;
                long generateIndex = 0;
                while (isRuning) {
                    if (Camear_MainActivity.YUVQueue.size() > 0) {
                        input = Camear_MainActivity.YUVQueue.poll();
                        byte[] yuv420sp = new byte[m_width * m_height * 3 / 2];
                        NV21ToNV12(input, yuv420sp, m_width, m_height);
                        input = yuv420sp;
                    }
                    if (input != null) {
                        try {
                            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
                            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
                            // Fix: bounded timeout instead of -1 so the thread
                            // cannot block forever after StopThread().
                            int inputBufferIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
                            if (inputBufferIndex >= 0) {
                                pts = computePresentationTime(generateIndex); // frame timestamp
                                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                                inputBuffer.clear();
                                inputBuffer.put(input);
                                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                                generateIndex += 1;
                            }
                            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            while (outputBufferIndex >= 0) {
                                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                                byte[] outData = new byte[bufferInfo.size];
                                outputBuffer.get(outData);
                                // Fix: flags is a bit mask — test individual
                                // bits instead of == so combined flags work.
                                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                                    // SPS/PPS: cache and prepend to key frames.
                                    configbyte = outData;
                                } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
                                        && configbyte != null) {
                                    // Fix: guard configbyte against null in
                                    // case a key frame precedes codec config.
                                    byte[] keyframe = new byte[outData.length + configbyte.length];
                                    System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                                    System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                                    outputStream.write(keyframe, 0, keyframe.length);
                                } else {
                                    // Non-key frame (or key frame before config).
                                    outputStream.write(outData, 0, outData.length);
                                }
                                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            }
                        } catch (Throwable t) {
                            t.printStackTrace();
                        }
                        // Fix: the original kept the last frame in `input`, so
                        // an empty queue re-encoded the same frame forever.
                        input = null;
                    } else {
                        try {
                            Thread.sleep(500);
                        } catch (InterruptedException e) {
                            // Restore the interrupt flag instead of swallowing it.
                            Thread.currentThread().interrupt();
                        }
                    }
                }
            }
        });
        EncoderThread.start();
    }

    /**
     * Converts an NV21 frame (Y plane then interleaved V,U) to NV12
     * (Y plane then interleaved U,V) by swapping each chroma byte pair.
     * Camera preview defaults to NV21 when no format is configured.
     *
     * @param nv21   source frame, width*height*3/2 bytes
     * @param nv12   destination buffer, width*height*3/2 bytes
     * @param width  frame width in pixels
     * @param height frame height in pixels
     */
    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
        if (nv21 == null || nv12 == null) return;
        int framesize = width * height;
        // The Y plane is identical in both formats; the original also copied
        // it a second time byte-by-byte, which was redundant.
        System.arraycopy(nv21, 0, nv12, 0, framesize);
        // Fix: the original chroma loops were off by one — they wrote
        // nv12[framesize - 1] (clobbering the last Y byte) and read
        // nv21[framesize - 1] (the last Y byte). Swap pairs in place instead.
        for (int j = 0; j < framesize / 2; j += 2) {
            nv12[framesize + j] = nv21[framesize + j + 1];     // U
            nv12[framesize + j + 1] = nv21[framesize + j];     // V
        }
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     * The 132 µs offset keeps the first timestamp nonzero.
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1000000 / m_framerate;
    }
}
本文福利, 免费领取C++音视频学习资料包、技术视频,内容包括(音视频开发,面试题,FFmpeg ,webRTC ,rtmp ,hls ,rtsp ,ffplay ,srs)↓↓↓↓↓↓见下面↓↓文章底部点击免费领取↓↓