package com.whyc.widget;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaScannerConnection;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.util.Arrays;

import static android.os.Environment.DIRECTORY_DOCUMENTS;

/**
 * A TextureView that pulls frames from an ImageReader to record video and capture still images.
 */
public class Camera2TextureView4 extends TextureView {
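
    /*
     * Usage sketch (an assumption about the host code, not something this class enforces):
     * with CAMERA and storage permissions already granted, a host Activity would roughly do
     *
     *     Camera2TextureView4 preview = findViewById(R.id.preview); // hypothetical layout id
     *     preview.init(llUpText, tvDevice);    // opens the camera once the surface is available
     *     preview.createRecorderSession();     // starts feeding ImageReader frames to the encoder
     *     // ...
     *     preview.stopRecording();             // releases the codec and finalizes the muxer
     */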

    private Context mContext;
    private CameraDevice mCameraDevice;
    private CaptureRequest.Builder captureBuilder = null;
    private CameraCaptureSession mCaptureSession;
    private ImageReader mImageReader;
    private Surface previewSurface;

    private MediaCodec mMediaCodec;
    private MediaMuxer mMediaMuxer;
    private Handler mBackgroundHandler;
    private HandlerThread mBackgroundThread;
    private int videoWidth = 1920;
    private int videoHeight = 1080;

    // Output directory for recorded videos and captured images
    File fileDir = new File(Environment.getExternalStoragePublicDirectory(DIRECTORY_DOCUMENTS).getAbsolutePath() + "/yc_test/");
    String recorderPath = fileDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + ".mp4";

    // Views supplied by the host page
    private LinearLayout llUpText;
    private TextView tvDevice;

    public Camera2TextureView4(Context context) {
        // Note: this constructor does not set up the recording pipeline;
        // XML inflation goes through the two-argument constructor below.
        super(context, null);
    }

    public Camera2TextureView4(Context context, AttributeSet attrs) throws IOException {
        super(context, attrs);
        mContext = context;
        mBackgroundThread = new HandlerThread("Camera2VideoRecorder");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
        mImageReader = ImageReader.newInstance(videoWidth, videoHeight, ImageFormat.YUV_420_888, 2);

        mMediaCodec = createAndConfigureEncoder(videoWidth, videoHeight);
        // mMediaCodec.start();
        // String recorderPath = fileDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + ".mp4";
        /*try {
            this.mMediaMuxer = new MediaMuxer(recorderPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            this.mMediaMuxer.setOrientationHint(90);
        } catch (Exception e) {
            e.printStackTrace();
        }*/
    }

    public void init(LinearLayout llUpText, TextView tvDevice) {
        this.llUpText = llUpText;
        this.tvDevice = tvDevice;
        setSurfaceTextureListener(mSurfaceTextureListener);
    }

    private SurfaceTextureListener mSurfaceTextureListener = new SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
            initCamera2();
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        }
    };

    @SuppressLint("MissingPermission")
    private void initCamera2() {
        // Camera permission must already have been granted by the host Activity.
        // ActivityCompat.requestPermissions(Camera2TextureViewActivity.this, permissionArray, 1);
        CameraManager cm = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        try {
            String cameraId = cm.getCameraIdList()[0];
            cm.openCamera(cameraId, mDeviceStateCallback, null);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            SurfaceTexture surfaceTexture = getSurfaceTexture();
            // Match the preview buffer to the encoder resolution.
            surfaceTexture.setDefaultBufferSize(videoWidth, videoHeight);
            previewSurface = new Surface(surfaceTexture);

            try {
                createPreviewSession();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
        }
    };

    private void createPreviewSession() {
        try {
            mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    mCaptureSession = session;
                    startPreview(mCameraDevice);
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                }
            }, null);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void startPreview(CameraDevice cameraDevice) {
        // Start the preview stream.
        try {
            captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureBuilder.addTarget(previewSurface);

            /* Configure the preview request */
            // Auto-focus mode
            captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_AUTO);
            // Auto-exposure mode
            captureBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
            // Trigger auto-focus
            captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CameraMetadata.CONTROL_AF_TRIGGER_START);
            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, 90);

            mCaptureSession.setRepeatingRequest(captureBuilder.build(), null, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /*MediaRecorder mediaRecorder = null;
    private void prepareVideoRecorder(String recorderPath, Surface inputSurface) {
        MediaProjection mediaProjection = mediaProjectionManager.getMediaProjection(RESULT_OK, null);
        VirtualDisplay virtualDisplay = mediaProjection.createVirtualDisplay("virtualDisplay", 1920, 1080, 60, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, inputSurface, null, null);

        mediaRecorder = new MediaRecorder();
        mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mediaRecorder.setVideoSize(1920, 1080);
        mediaRecorder.setOutputFile(recorderPath);
        mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
        mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        mediaRecorder.setVideoEncodingBitRate(512 * 1000);
        mediaRecorder.setInputSurface(inputSurface);
        mediaRecorder.setVideoFrameRate(30); // frame rate can be adjusted as needed
        try {
            mediaRecorder.prepare();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }*/

    public void createRecorderSession() throws IOException {
        String recorderPath = fileDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + ".mp4";
        try {
            // Make sure the output directory exists before the muxer opens the file.
            if (!fileDir.exists()) {
                fileDir.mkdirs();
            }
            this.mMediaMuxer = new MediaMuxer(recorderPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            this.mMediaMuxer.setOrientationHint(90);

            // Deliver frame callbacks on the background thread instead of the UI thread.
            mImageReader.setOnImageAvailableListener(mImageReaderListener, mBackgroundHandler);
            captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureBuilder.addTarget(previewSurface);
            captureBuilder.addTarget(mImageReader.getSurface());
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    mCaptureSession = session;
                    try {
                        // Camera2 recording mode: keep focus continuous while frames are streamed.
                        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                        mCaptureSession.setRepeatingRequest(captureBuilder.build(), null, mBackgroundHandler);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                }
            }, null);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private int mVideoTrackIndex = -1;
    // One YUV 4:2:0 frame: width * height luma bytes plus half as many chroma bytes.
    int I420size = videoWidth * videoHeight * 3 / 2;

    private ImageReader.OnImageAvailableListener mImageReaderListener = reader -> {
        Image image = reader.acquireNextImage();
        if (image == null) {
            return;
        }

        byte[] nv12 = new byte[I420size];
        YUVToNV21_NV12(image, nv12, videoWidth, videoHeight, "NV12");
        long now = image.getTimestamp();
        image.close();

        // Still-image capture: save at most one frame per second.
        SharedPreferences camera2Time = mContext.getSharedPreferences("camera2Time", Context.MODE_PRIVATE);
        long lastTime = camera2Time.getLong("time", 0);

        if (lastTime == 0 || now - lastTime > 1000000000) {
            camera2Time.edit().putLong("time", now).apply();
            Bitmap bitmap = nv21ToBitmap(nv12, videoWidth, videoHeight);
            saveBitmapToFile(bitmap);
        }

        // Feed the raw frame to MediaCodec.
        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(0);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputBufferIndex);
            inputBuffer.clear();
            inputBuffer.put(nv12);
            long nanoTime = System.nanoTime();
            long presentationTimeUs = nanoTime / 1000; // nanoseconds -> microseconds
            mMediaCodec.queueInputBuffer(inputBufferIndex, 0, nv12.length, presentationTimeUs, 0);
        }
        // Pull encoded H.264 from the codec and mux it into the MP4.
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
        if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mVideoTrackIndex = mMediaMuxer.addTrack(mMediaCodec.getOutputFormat());
            if (mVideoTrackIndex != -1) {
                mMediaMuxer.start();
            }
        }
        while (outputBufferIndex >= 0) {
            ByteBuffer outputBuffer = mMediaCodec.getOutputBuffer(outputBufferIndex);
            if (mVideoTrackIndex != -1) {
                mMediaMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, bufferInfo);
            }
            mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
            outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;
            }
        }

        /*ByteBuffer encodedData = mMediaCodec.getOutputBuffer(outputBufferIndex);
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
            // If the MediaMuxer is not ready yet, add the video track.

            // Write the encoded data to the MediaMuxer.
            MediaFormat format = mMediaCodec.getOutputFormat();
            int trackIndex = mMediaMuxer.addTrack(format);
            if(trackIndex)
                mMediaMuxer.start();

            encodedData.position(bufferInfo.offset);
            encodedData.limit(bufferInfo.offset + bufferInfo.size);
            mMediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
        }

        mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);*/
    };
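
    /*
     * The listener above stamps each frame with System.nanoTime(); for tighter A/V sync the
     * sensor timestamp already read into `now` (Image.getTimestamp(), in nanoseconds) is the
     * more common source. A minimal conversion sketch, not wired into the listener:
     */
    private static long imageTimestampToPresentationUs(long imageTimestampNs) {
        // MediaCodec presentation times are expressed in microseconds.
        return imageTimestampNs / 1000;
    }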

    private static void YUVToNV21_NV12(Image image, byte[] nv21, int w, int h, String type) {
        Image.Plane[] planes = image.getPlanes();
        int remaining0 = planes[0].getBuffer().remaining();
        int remaining1 = planes[1].getBuffer().remaining();
        int remaining2 = planes[2].getBuffer().remaining();
        // Three arrays to receive the Y, U and V planes.
        byte[] yRawSrcBytes = new byte[remaining0];
        byte[] uRawSrcBytes = new byte[remaining1];
        byte[] vRawSrcBytes = new byte[remaining2];
        planes[0].getBuffer().get(yRawSrcBytes);
        planes[1].getBuffer().get(uRawSrcBytes);
        planes[2].getBuffer().get(vRawSrcBytes);
        int j = 0, k = 0;
        boolean flag = type.equals("NV21");
        for (int i = 0; i < nv21.length; i++) {
            if (i < w * h) {
                // First fill in w * h Y samples.
                nv21[i] = yRawSrcBytes[i];
            } else {
                if (flag) {
                    // For NV21 the first chroma byte after the Y plane is a V sample.
                    nv21[i] = vRawSrcBytes[j];
                    // getPixelStride() is the distance between useful samples:
                    // 1 means tightly packed, 2 means every other byte carries data.
                    j += planes[1].getPixelStride();
                } else {
                    // For NV12 the first chroma byte after the Y plane is a U sample.
                    nv21[i] = uRawSrcBytes[k];
                    // getPixelStride() is the distance between useful samples:
                    // 1 means tightly packed, 2 means every other byte carries data.
                    k += planes[2].getPixelStride();
                }
                // Toggling the flag interleaves the remaining chroma bytes as UV or VU pairs.
                flag = !flag;
            }
        }
    }
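
    /*
     * YUVToNV21_NV12 above assumes each plane's rowStride equals the frame width. On devices
     * that pad their rows (rowStride > width) the Y plane has to be copied row by row first.
     * A minimal sketch of that copy under the same YUV_420_888 assumptions; it is not called
     * anywhere in this class:
     */
    private static void copyYPlaneRowByRow(Image image, byte[] dst, int width, int height) {
        Image.Plane yPlane = image.getPlanes()[0];
        ByteBuffer buffer = yPlane.getBuffer();
        int rowStride = yPlane.getRowStride();
        byte[] row = new byte[rowStride];
        for (int r = 0; r < height; r++) {
            // The last row may omit the padding, so only read what is actually left.
            int toRead = Math.min(rowStride, buffer.remaining());
            buffer.get(row, 0, toRead);
            // Keep only the first `width` bytes of each row; the rest is padding.
            System.arraycopy(row, 0, dst, r * width, width);
        }
    }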

    // Unused helper kept from earlier experiments: concatenates the Y plane with the chroma
    // planes in V-then-U order. This simple copy only produces a valid NV21 buffer when the
    // chroma planes are tightly packed (pixelStride == 1).
    public static byte[] imageToNV12(Image image) {
        final int width = image.getWidth();
        final int height = image.getHeight();

        Image.Plane[] planes = image.getPlanes();
        byte[] yuv = new byte[width * height * 2];

        // Y component
        ByteBuffer yBuffer = planes[0].getBuffer();
        int ySize = yBuffer.remaining();
        yBuffer.get(yuv, 0, ySize);

        // U and V components
        ByteBuffer uBuffer = planes[1].getBuffer();
        ByteBuffer vBuffer = planes[2].getBuffer();
        int uSize = uBuffer.remaining();
        int vSize = vBuffer.remaining();

        uBuffer.get(yuv, ySize, uSize);
        vBuffer.get(yuv, ySize + uSize, vSize);

        // Swap U and V: place the V bytes directly after the Y plane, then the U bytes.
        byte[] swapped = new byte[yuv.length];
        System.arraycopy(yuv, 0, swapped, 0, ySize);
        System.arraycopy(yuv, ySize + uSize, swapped, ySize, vSize);
        System.arraycopy(yuv, ySize, swapped, ySize + vSize, uSize);

        return swapped;
    }

    /*private void encodeFrame(Image image) {
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int inputBufferId = mMediaCodec.dequeueInputBuffer(100);
        if (inputBufferId >= 0) {
            ByteBuffer inputBuffer = mMediaCodec.getInputBuffers()[inputBufferId];
            inputBuffer.clear();

            // This is the key part!
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer[] yuvPlanes = new ByteBuffer[planes.length];
            int[] offsets = new int[planes.length];
            int[] strides = new int[planes.length];

            for (int i = 0; i < planes.length; i++) {
                yuvPlanes[i] = planes[i].getBuffer();
                offsets[i] = yuvPlanes[i].position();
                strides[i] = planes[i].getRowStride();
            }

            int height = image.getHeight();
            int ySize = calculateSize(strides[0], image.getHeight());
            int uSize = calculateSize(strides[1], image.getHeight() / 2);
            int vSize = calculateSize(strides[2], image.getHeight() / 2);

            if (ySize + uSize + vSize > inputBuffer.capacity()) {
                throw new BufferOverflowException();
            }
            // Copy Y plane
            copyPlaneData(yuvPlanes[0], inputBuffer, ySize, strides[0], height);

            // Copy U plane
            copyPlaneData(yuvPlanes[1], inputBuffer, uSize, strides[1], height / 2);

            // Copy V plane
            copyPlaneData(yuvPlanes[2], inputBuffer, vSize, strides[2], height / 2);

            mMediaCodec.queueInputBuffer(
                    inputBufferId,
                    0,
                    ySize + uSize + vSize,
                    image.getTimestamp(),
                    0
            );
            *//*int ySize = planes[0].getBuffer().remaining();
            int uvSize = planes[1].getBuffer().remaining();

            // Copy Y plane
            planes[0].getBuffer().rewind();
            inputBuffer.put(planes[0].getBuffer());

            // Copy U and V planes
            planes[1].getBuffer().rewind();
            inputBuffer.put(planes[1].getBuffer());
            planes[2].getBuffer().rewind();
            inputBuffer.put(planes[2].getBuffer());

            mMediaCodec.queueInputBuffer(
                    inputBufferId,
                    0,
                    ySize + 2 * uvSize, // Size of YUV data
                    image.getTimestamp(),
                    0
            );*//*

        }
        int encoderStatus = mMediaCodec.dequeueOutputBuffer(bufferInfo, 1000);
        MediaFormat format = mMediaCodec.getOutputFormat();
        int track = mMediaMuxer.addTrack(format);
        if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mMediaMuxer.start();
        } else if (encoderStatus >= 0) {
            ByteBuffer encodedData = mMediaCodec.getOutputBuffers()[encoderStatus];
            if (encodedData != null && encodedData.position() < encodedData.limit()) {
                mMediaMuxer.writeSampleData(track, encodedData, bufferInfo);
            }
            mMediaCodec.releaseOutputBuffer(encoderStatus, false);
        }
    }

    private int calculateSize(int stride, int height) {
        return stride * height;
    }

    private void copyPlaneData(ByteBuffer src, ByteBuffer dst, int size, int stride, int height) {
        int rowPadding = stride - (size / height);
        int shift = 0;
        while (src.hasRemaining()) {
            dst.put(src.get());
            shift++;
            if (shift == stride) {
                shift = 0;
                dst.position(dst.position() + rowPadding);
            }
        }
    }*/

    private static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        Bitmap bitmap = null;
        try {
            YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            // Compress the frame into the stream as JPEG.
            image.compressToJpeg(new Rect(0, 0, width, height), 100, stream);
            // Decode the JPEG bytes back into a Bitmap.
            bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
            stream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bitmap;
    }

    private void saveBitmapToFile(Bitmap bitmap) {
        // Build the output path and file name for the captured image.
        String fileName = "IMG_" + System.currentTimeMillis() + ".jpg";
        String filePath = Environment.getExternalStoragePublicDirectory(DIRECTORY_DOCUMENTS).getAbsolutePath() + "/yc_test" + File.separator + fileName;
        // try-with-resources closes the stream even if compression fails.
        try (FileOutputStream fos = new FileOutputStream(filePath)) {
            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
            fos.flush();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void saveImageToGallery(byte[] data) {
        // Build the output path and file name for the captured image.
        String fileName = "IMG_" + System.currentTimeMillis() + ".jpg";
        String filePath = Environment.getExternalStoragePublicDirectory(DIRECTORY_DOCUMENTS).getAbsolutePath() + "/yc_test" + File.separator + fileName;

        // Write the JPEG bytes to the file.
        try {
            FileOutputStream fos = new FileOutputStream(filePath);
            fos.write(data);
            fos.close();

            // Ask the media scanner to index the new file.
            MediaScannerConnection.scanFile(mContext, new String[]{filePath}, null, null);

            // On some devices a broadcast is also needed before the image shows up in the gallery.
            mContext.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.fromFile(new File(filePath))));

            // Success toast (disabled)
            // Toast.makeText(this, "Image saved", Toast.LENGTH_SHORT).show();
        } catch (IOException e) {
            e.printStackTrace();
            // Failure toast (disabled)
            // Toast.makeText(this, "Failed to save image", Toast.LENGTH_SHORT).show();
        }
    }

    public void stopRecording() {
        // Demo/test only: hide the overlay.
        llUpText.setVisibility(View.GONE);
        // Stop delivering frames.
        mImageReader.close();
        this.mMediaCodec.stop();
        this.mMediaCodec.release();
        // Only stop the muxer if a track was added and it was actually started.
        if (mVideoTrackIndex != -1) {
            this.mMediaMuxer.stop();
        }
        this.mMediaMuxer.release();
        // Stop video recording.
        try {
            // createPreviewSession();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        } finally {
            // mMediaRecorder.reset();
            // mMediaRecorder.release();
            // mMediaRecorder = null;
        }

        // Close the camera preview session.
        // if (captureSession != null) {
        //     captureSession.close();
        //     captureSession = null;
        // }

        // Media scanner update.
        // addToGallery(recorderPath);
    }
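
    /*
     * stopRecording() above stops the encoder without signalling end-of-stream, so frames still
     * queued inside MediaCodec may never reach the muxer. A minimal drain sketch under the
     * ByteBuffer-input setup used by createAndConfigureEncoder; it is not called by this class:
     */
    private void drainEncoderAndSignalEos() {
        int inputIndex = mMediaCodec.dequeueInputBuffer(10_000);
        if (inputIndex >= 0) {
            // An empty input buffer flagged END_OF_STREAM tells the encoder no more frames are coming.
            mMediaCodec.queueInputBuffer(inputIndex, 0, 0,
                    System.nanoTime() / 1000, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (true) {
            int outputIndex = mMediaCodec.dequeueOutputBuffer(info, 10_000);
            if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                break; // nothing more is ready within the timeout
            }
            if (outputIndex < 0) {
                continue; // format/buffer change notifications; the track was added earlier
            }
            ByteBuffer encoded = mMediaCodec.getOutputBuffer(outputIndex);
            if (mVideoTrackIndex != -1 && info.size > 0 && encoded != null) {
                mMediaMuxer.writeSampleData(mVideoTrackIndex, encoded, info);
            }
            mMediaCodec.releaseOutputBuffer(outputIndex, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;
            }
        }
    }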

    private void addToGallery(String videoFilePath) {
        // Ask the media scanner to index the recorded video.
        MediaScannerConnection.scanFile(mContext, new String[]{videoFilePath}, null,
                (path, uri) -> {
                    // Called once the file has been added to the media store.
                    Toast.makeText(mContext, "Video saved to gallery", Toast.LENGTH_SHORT).show();
                });
    }

    private MediaCodec createAndConfigureEncoder(int width, int height) {
        MediaCodecInfo codecInfo = selectCodec("video/avc");
        if (codecInfo == null) {
            throw new IllegalStateException("No H.264 (video/avc) encoder available on this device");
        }
        MediaCodec codec = null;
        try {
            codec = MediaCodec.createByCodecName(codecInfo.getName());
        } catch (Exception e) {
            e.printStackTrace();
        }
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        // format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        format.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 4);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();
        return codec;
    }
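
    /*
     * createAndConfigureEncoder assumes the chosen encoder accepts COLOR_FormatYUV420Flexible.
     * A minimal capability check sketch that could guard that assumption; it is not called
     * anywhere in this class:
     */
    private static boolean supportsColorFormat(MediaCodecInfo codecInfo, String mimeType, int colorFormat) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int supported : capabilities.colorFormats) {
            if (supported == colorFormat) {
                return true;
            }
        }
        return false;
    }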

    private MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (String type : types) {
                if (type.equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }

}