whycxzp
2024-07-27 efa5fe48ebc866ec07a597ef551cf7e13c3b7aba
app/src/main/java/com/whyc/widget/Camera2TextureView4.java
@@ -2,19 +2,14 @@
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.media.Image;
import android.media.ImageReader;
@@ -24,7 +19,6 @@
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaScannerConnection;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
@@ -32,16 +26,15 @@
import android.util.AttributeSet;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import java.io.ByteArrayOutputStream;
import com.whyc.util.BitmapUtil;
import com.whyc.util.YUVUtil;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.util.Arrays;
@@ -49,23 +42,34 @@
/**
 * Uses ImageReader video frames to capture the video and to process still images
 * TODO mMediaMuxer throws an exception when recording is stopped
 * */
public class Camera2TextureView4 extends TextureView {
    private Context mContext;
    private CameraDevice mCameraDevice;
    private CaptureRequest.Builder captureBuilder = null;
    private CaptureRequest.Builder previewCaptureBuilder = null;
    private CaptureRequest.Builder videoCaptureBuilder = null;
    private CameraCaptureSession mCaptureSession;
    private ImageReader mImageReader;
    private Surface previewSurface;
    private MediaCodec mMediaCodec;
    private MediaMuxer mMediaMuxer;
    private Handler mBackgroundHandler;
    private HandlerThread mBackgroundThread;
    private int videoWidth = 1920;
    private int videoHeight = 1080;
    /** Thread handlers */
    private HandlerThread videoThread;
    private HandlerThread previewThread;
    private HandlerThread imageThread;
    private HandlerThread cameraThread;
    private Handler videoThreadHandler;
    private Handler previewThreadHandler;
    private Handler imageThreadHandler;
    private Handler cameraThreadHandler;
    // Directory for the video and image files
    File fileDir = new File(Environment.getExternalStoragePublicDirectory(DIRECTORY_DOCUMENTS).getAbsolutePath() + "/yc_test/");
    String recorderPath = fileDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + ".mp4";
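    // Note (assumption, not part of this commit): the yc_test directory must exist before
    // MediaMuxer/FileOutputStream can create files in it, e.g. via fileDir.mkdirs().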
@@ -81,20 +85,6 @@
    public Camera2TextureView4(Context context, AttributeSet attrs) throws IOException {
        super(context, attrs);
        mContext = context;
        mBackgroundThread = new HandlerThread("Camera2VideoRecorder");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
        mImageReader = ImageReader.newInstance(videoWidth, videoHeight, ImageFormat.YUV_420_888, 2);
        mMediaCodec = createAndConfigureEncoder(videoWidth, videoHeight);
//        mMediaCodec.start();
//        String recorderPath = fileDir.getAbsolutePath()+File.separator + System.currentTimeMillis() + ".mp4";
        /*try {
            this.mMediaMuxer = new MediaMuxer(recorderPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            this.mMediaMuxer.setOrientationHint(90);
        }catch (Exception e){
            e.printStackTrace();
        }*/
    }
    public void init(LinearLayout llUpText, TextView tvDevice) {
@@ -129,6 +119,13 @@
        CameraManager cm = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        try {
            String cameraId = cm.getCameraIdList()[0];
            // Initialize the preview thread
            initPreviewThreadHandler();
            cameraThread = new HandlerThread("Camera2Video");
            cameraThread.start();
            cameraThreadHandler = new Handler(cameraThread.getLooper());
//            cm.openCamera(cameraId,mDeviceStateCallback,previewThreadHandler);
//            cm.openCamera(cameraId,mDeviceStateCallback,cameraThreadHandler);
            cm.openCamera(cameraId,mDeviceStateCallback,null);
        }catch (Exception e){
            e.printStackTrace();
@@ -138,11 +135,13 @@
    private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {
            @Override
            public void onOpened(CameraDevice cameraDevice) {
                // Preview and image monitoring
                mCameraDevice = cameraDevice;
                SurfaceTexture surfaceTexture = getSurfaceTexture();
                surfaceTexture.setDefaultBufferSize(1920, 1080);
                previewSurface = new Surface(surfaceTexture);
                // Image monitoring
                mImageReader = ImageReader.newInstance(videoWidth, videoHeight, ImageFormat.YUV_420_888, 2);
                try {
                    createPreviewSession();
                }catch (Exception e){
@@ -163,41 +162,39 @@
    private void createPreviewSession() {
        try {
            mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() {
            mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    mCaptureSession = session;
                    startPreview(mCameraDevice);
                    try {
                        mCaptureSession = session;
                        previewCaptureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                        previewCaptureBuilder.addTarget(previewSurface);
                        /* Configure the preview request */
                        // Set the auto-focus mode
                        previewCaptureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
                        // Set the auto-exposure mode
                        previewCaptureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                        //            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,  90);
                        startPreview();
                    }catch (Exception e){
                        e.printStackTrace();
                    }
                }
                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                }
            }, null);
            }, previewThreadHandler);
        }catch (Exception e){
            e.printStackTrace();
        }
    }
    private void startPreview(CameraDevice cameraDevice) {
    private void startPreview() {
        // Start the preview
        try {
            captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureBuilder.addTarget(previewSurface);
            /* Configure the preview request */
            // Set the auto-focus mode
            captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_AUTO);
            // Set the auto-exposure mode
            captureBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
            // Trigger the auto-focus scan
            captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CameraMetadata.CONTROL_AF_TRIGGER_START);
            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,  90);
            mCaptureSession.setRepeatingRequest(captureBuilder.build(), null, mBackgroundHandler);
            mCaptureSession.setRepeatingRequest(previewCaptureBuilder.build(), null, previewThreadHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
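        // Note: for a continuously repeating preview request, CONTROL_AF_MODE_CONTINUOUS_PICTURE is
        // the more usual auto-focus mode; CONTROL_AF_TRIGGER_START performs a one-shot focus scan and,
        // left set on a repeating request, keeps re-triggering it. JPEG_ORIENTATION only affects JPEG
        // outputs, not the preview surface or the YUV ImageReader.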
@@ -225,27 +222,41 @@
            e.printStackTrace();
        }
    }*/
    /** Video recording */
    public void createRecorderSession() throws IOException {
        try {
            // Stop the preview (repeating request)
            mCaptureSession.stopRepeating();
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        // Shut down the preview handler thread
        stopPreviewThreadHandler();
        // Initialize the recording handler threads
        initThreadHandlerForRecording();
        mImageReader.setOnImageAvailableListener(mImageReaderListener, imageThreadHandler);
        String recorderPath = fileDir.getAbsolutePath()+File.separator + System.currentTimeMillis() + ".mp4";
        try {
            this.mMediaMuxer = new MediaMuxer(recorderPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            this.mMediaMuxer.setOrientationHint(90);
            mMediaCodec = createAndConfigureEncoder(videoWidth, videoHeight);
            mMediaMuxer = new MediaMuxer(recorderPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
//            mMediaMuxer.setOrientationHint(90);
            mImageReader.setOnImageAvailableListener(mImageReaderListener, null);
            captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureBuilder.addTarget(previewSurface);
            captureBuilder.addTarget(mImageReader.getSurface());
            videoCaptureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            videoCaptureBuilder.addTarget(previewSurface);
            videoCaptureBuilder.addTarget(mImageReader.getSurface());
            mCaptureSession.setRepeatingRequest(videoCaptureBuilder.build(), null, videoThreadHandler);
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
        /*try {
            mCameraDevice.createCaptureSession(Arrays.asList(previewSurface,mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    mCaptureSession = session;
                    try {
                        // camera2 video recording mode
                        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
//                        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                        mCaptureSession.setRepeatingRequest(captureBuilder.build(), null, mBackgroundHandler);
                    } catch (Exception e) {
@@ -261,7 +272,7 @@
        } catch (Exception e) {
            e.printStackTrace();
        }
        }*/
    }
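    // Note: the TEMPLATE_RECORD request above can target both previewSurface and
    // mImageReader.getSurface() without reconfiguring the session, because createPreviewSession()
    // already registered both surfaces when the capture session was created.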
    private int mVideoTrackIndex = -1;
@@ -269,18 +280,23 @@
    private ImageReader.OnImageAvailableListener mImageReaderListener = reader -> {
        Image image = reader.acquireNextImage();
        //        Image imageCopy = ImageReader.newInstance(videoWidth, videoHeight, ImageFormat.YUV_420_888, 1).acquireLatestImage();
        byte[] nv12 = new byte[I420size];
        YUVToNV21_NV12(image,nv12,videoWidth,videoHeight,"NV12");
        byte[] nv21 = new byte[I420size];
        YUVUtil.YUVToNV21_NV12(image,nv12,videoWidth,videoHeight,"NV12");
        long now = image.getTimestamp();
        image.close();
        // Still-image capture
        SharedPreferences camera2Time = mContext.getSharedPreferences("camera2Time", Context.MODE_PRIVATE);
        long lastTime = camera2Time.getLong("time", 0);
        if (lastTime == 0  || now-lastTime > 1000000000) {
        long secondsGap = (now - lastTime) / 1000000000;
        if (lastTime == 0  || secondsGap > 2) {
            camera2Time.edit().putLong("time", now).apply();
            Bitmap bitmap = nv21ToBitmap(nv12, videoWidth, videoHeight);
            saveBitmapToFile(bitmap);
            // The NV12 frame must be converted to NV21 before it can be saved as an image
            nv21 = convertNV12toNV21(nv12);
            Bitmap bitmap = BitmapUtil.nv21ToBitmap(nv21, videoWidth, videoHeight);
            BitmapUtil.saveBitmapToFile(bitmap);
        }
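        // Image.getTimestamp() is in nanoseconds; with the integer division above, a still image is
        // saved at most about once every three seconds.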
        // Submit the frame data to MediaCodec
@@ -314,259 +330,42 @@
            }
        }
            /*ByteBuffer encodedData = mMediaCodec.getOutputBuffer(outputBufferIndex);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                // If the MediaMuxer is not ready yet, add the video track
                // Write the encoded data to the MediaMuxer
                MediaFormat format = mMediaCodec.getOutputFormat();
                int trackIndex = mMediaMuxer.addTrack(format);
                if(trackIndex)
                mMediaMuxer.start();
                encodedData.position(bufferInfo.offset);
                encodedData.limit(bufferInfo.offset + bufferInfo.size);
                mMediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
            }
            mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);*/
    };
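    /* Sketch (assumption, not part of this commit): the usual MediaCodec-to-MediaMuxer drain loop.
     * The muxer may only be started after the encoder reports INFO_OUTPUT_FORMAT_CHANGED, and
     * writeSampleData() must be skipped until a track has been added; violating that order is a
     * likely cause of the mMediaMuxer exception mentioned in the class-level TODO. The method name
     * and call site are hypothetical. */
    private void drainEncoderSketch() {
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int index = mMediaCodec.dequeueOutputBuffer(bufferInfo, 10000);
        while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Add the video track exactly once, then start the muxer.
                mVideoTrackIndex = mMediaMuxer.addTrack(mMediaCodec.getOutputFormat());
                mMediaMuxer.start();
            } else if (index >= 0) {
                ByteBuffer encodedData = mMediaCodec.getOutputBuffer(index);
                if (encodedData != null && bufferInfo.size > 0 && mVideoTrackIndex >= 0
                        && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    encodedData.position(bufferInfo.offset);
                    encodedData.limit(bufferInfo.offset + bufferInfo.size);
                    mMediaMuxer.writeSampleData(mVideoTrackIndex, encodedData, bufferInfo);
                }
                mMediaCodec.releaseOutputBuffer(index, false);
            }
            index = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
        }
    }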
    private static void YUVToNV21_NV12(Image image, byte[] nv21, int w, int h,String type) {
        Image.Plane[] planes = image.getPlanes();
        int remaining0 = planes[0].getBuffer().remaining();
        int remaining1 = planes[1].getBuffer().remaining();
        int remaining2 = planes[2].getBuffer().remaining();
        // Prepare three arrays to receive the Y, U and V planes.
        byte[] yRawSrcBytes = new byte[remaining0];
        byte[] uRawSrcBytes = new byte[remaining1];
        byte[] vRawSrcBytes = new byte[remaining2];
        planes[0].getBuffer().get(yRawSrcBytes);
        planes[1].getBuffer().get(uRawSrcBytes);
        planes[2].getBuffer().get(vRawSrcBytes);
        int j = 0, k = 0;
        boolean flag = type.equals("NV21");
        for (int i = 0; i < nv21.length; i++) {
            if (i < w * h) {
                // First fill in the w*h Y samples
                nv21[i] = yRawSrcBytes[i];
            } else {
                if (flag) {
                    // For NV21, the first chroma byte after the Y plane is a V sample
                    nv21[i] = vRawSrcBytes[j];
                    // PixelStride is the step between useful bytes: 1 = tightly packed, 2 = every other byte
                    j += planes[1].getPixelStride();
                } else {
                    // For NV12, the first chroma byte after the Y plane is a U sample
                    nv21[i] = uRawSrcBytes[k];
                    // PixelStride is the step between useful bytes: 1 = tightly packed, 2 = every other byte
                    k += planes[2].getPixelStride();
                }
                // Toggling the flag after every chroma byte interleaves the V/U (or U/V) samples
                flag = !flag;
            }
    public byte[] convertNV12toNV21(byte[] nv12) {
        int length = nv12.length;
        byte[] nv21 = new byte[length];
        // Copy the Y plane: w*h bytes, i.e. two thirds of a standard 4:2:0 frame (w*h*3/2 bytes)
        int ySize = length / 3 * 2;
        System.arraycopy(nv12, 0, nv21, 0, ySize);
        // Copy the interleaved chroma plane, swapping each U/V pair to V/U
        for (int i = ySize; i < length; i += 2) {
            nv21[i] = nv12[i + 1];
            nv21[i + 1] = nv12[i];
        }
    }
    public static byte[] imageToNV12(Image image) {
        final int width = image.getWidth();
        final int height = image.getHeight();
        Image.Plane[] planes = image.getPlanes();
        byte[] yuv = new byte[width * height *2];
        // Y component
        ByteBuffer yBuffer = planes[0].getBuffer();
        int ySize = yBuffer.remaining();
        yBuffer.get(yuv, 0, ySize);
        // U and V components
        ByteBuffer uBuffer = planes[1].getBuffer();
        ByteBuffer vBuffer = planes[2].getBuffer();
        int uSize = uBuffer.remaining();
        int vSize = vBuffer.remaining();
        uBuffer.get(yuv, ySize, uSize);
        vBuffer.get(yuv, ySize + uSize, vSize);
        // Swap U and V for NV21 format
        byte[] swapped = new byte[yuv.length];
        System.arraycopy(yuv, 0, swapped, 0, ySize);
        System.arraycopy(yuv, ySize + uSize, swapped, ySize + uSize, vSize);
        System.arraycopy(yuv, ySize, swapped, ySize, uSize);
        return swapped;
    }
    /*private void encodeFrame(Image image) {
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int inputBufferId = mMediaCodec.dequeueInputBuffer(100);
        if (inputBufferId >= 0) {
            ByteBuffer inputBuffer = mMediaCodec.getInputBuffers()[inputBufferId];
            inputBuffer.clear();
            // This is the key part!
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer[] yuvPlanes = new ByteBuffer[planes.length];
            int[] offsets = new int[planes.length];
            int[] strides = new int[planes.length];
            for (int i = 0; i < planes.length; i++) {
                yuvPlanes[i] = planes[i].getBuffer();
                offsets[i] = yuvPlanes[i].position();
                strides[i] = planes[i].getRowStride();
            }
            int height = image.getHeight();
            int ySize = calculateSize(strides[0], image.getHeight());
            int uSize = calculateSize(strides[1], image.getHeight() / 2);
            int vSize = calculateSize(strides[2], image.getHeight() / 2);
            if (ySize + uSize + vSize > inputBuffer.capacity()) {
                throw new BufferOverflowException();
            }
            // Copy Y plane
            copyPlaneData(yuvPlanes[0], inputBuffer, ySize, strides[0],height);
            // Copy U plane
            copyPlaneData(yuvPlanes[1], inputBuffer, uSize, strides[1],height/2);
            // Copy V plane
            copyPlaneData(yuvPlanes[2], inputBuffer, vSize, strides[2],height/2);
            mMediaCodec.queueInputBuffer(
                    inputBufferId,
                    0,
                    ySize + uSize + vSize,
                    image.getTimestamp(),
                    0
            );
            *//*int ySize = planes[0].getBuffer().remaining();
            int uvSize = planes[1].getBuffer().remaining();
            // Copy Y plane
            planes[0].getBuffer().rewind();
            inputBuffer.put(planes[0].getBuffer());
            // Copy U and V planes
            planes[1].getBuffer().rewind();
            inputBuffer.put(planes[1].getBuffer());
            planes[2].getBuffer().rewind();
            inputBuffer.put(planes[2].getBuffer());
            mMediaCodec.queueInputBuffer(
                    inputBufferId,
                    0,
                    ySize + 2 * uvSize, // Size of YUV data
                    image.getTimestamp(),
                    0
            );*//*
        }
        int encoderStatus = mMediaCodec.dequeueOutputBuffer(bufferInfo, 1000);
        MediaFormat format = mMediaCodec.getOutputFormat();
        int track = mMediaMuxer.addTrack(format);
        if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mMediaMuxer.start();
        } else if (encoderStatus >= 0) {
            ByteBuffer encodedData = mMediaCodec.getOutputBuffers()[encoderStatus];
            if (encodedData != null && encodedData.position() < encodedData.limit()) {
                mMediaMuxer.writeSampleData(track, encodedData, bufferInfo);
            }
            mMediaCodec.releaseOutputBuffer(encoderStatus, false);
        }
    }
    private int calculateSize(int stride, int height) {
        return stride * height;
    }
    private void copyPlaneData(ByteBuffer src, ByteBuffer dst, int size, int stride,int height) {
        int rowPadding = stride - (size / height);
        int shift = 0;
        while (src.hasRemaining()) {
            dst.put(src.get());
            shift++;
            if (shift == stride) {
                shift = 0;
                dst.position(dst.position() + rowPadding);
            }
        }
    }*/
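    /* Sketch (assumption, not part of this commit): feeding a packed NV12 frame into the encoder,
     * as an alternative to the commented-out encodeFrame() above. Assumes the encoder was configured
     * with COLOR_FormatYUV420SemiPlanar, that its input buffers hold at least one full
     * width*height*3/2 frame (e.g. the buffer produced by YUVUtil.YUVToNV21_NV12() in the
     * ImageReader listener), and that the presentation time has been converted from the
     * nanosecond Image.getTimestamp() to microseconds. The method name is hypothetical. */
    private void queueNv12FrameSketch(byte[] nv12, long presentationTimeUs) {
        int inputIndex = mMediaCodec.dequeueInputBuffer(10000);
        if (inputIndex >= 0) {
            ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputIndex);
            inputBuffer.clear();
            inputBuffer.put(nv12);
            mMediaCodec.queueInputBuffer(inputIndex, 0, nv12.length, presentationTimeUs, 0);
        }
    }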
    private static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        Bitmap bitmap = null;
        try {
            YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            // Compress the YUV image into the JPEG output stream
            image.compressToJpeg(new Rect(0, 0, width, height), 100, stream);
            // Decode the JPEG byte stream into a Bitmap
            bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
            stream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bitmap;
    }
    private void saveBitmapToFile(Bitmap bitmap) {
        // Define the save path and file name for the image
        String fileName = "IMG_" + System.currentTimeMillis() + ".jpg";
        String filePath = Environment.getExternalStoragePublicDirectory(DIRECTORY_DOCUMENTS).getAbsolutePath()  + "/yc_test"+ File.separator + fileName;
        try{
            FileOutputStream fos = new FileOutputStream(filePath);
            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
            fos.flush();
        }catch (Exception e){
            e.printStackTrace();
        }
    }
    private void saveImageToGallery(byte[] data) {
        // Define the save path and file name for the image
        String fileName = "IMG_" + System.currentTimeMillis() + ".jpg";
        String filePath = Environment.getExternalStoragePublicDirectory(DIRECTORY_DOCUMENTS).getAbsolutePath()  + "/yc_test"+ File.separator + fileName;
        // Create the file output stream
        try {
            FileOutputStream fos = new FileOutputStream(filePath);
            fos.write(data);
            fos.close();
            // Notify the media scanner so the gallery picks up the new file
            MediaScannerConnection.scanFile(mContext, new String[]{filePath}, null, null);
            // On some devices a broadcast is needed for the image to appear in the gallery immediately
            mContext.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.fromFile(new File(filePath))));
            // Show a "saved successfully" toast
            //            Toast.makeText(this, "Image saved successfully", Toast.LENGTH_SHORT).show();
        } catch (IOException e) {
            e.printStackTrace();
            // Show a "save failed" toast
            //            Toast.makeText(this, "Failed to save image", Toast.LENGTH_SHORT).show();
        }
        return nv21;
    }
    public void stopRecording() {
        // Demo/test only
        llUpText.setVisibility(View.GONE);
        // Stop the preview image monitoring
        mImageReader.close();
        this.mMediaCodec.stop();
        this.mMediaCodec.release();
        this.mMediaMuxer.stop();
        this.mMediaMuxer.release();
        // Stop video recording
        try {
//            createPreviewSession();
        } catch (IllegalStateException e) {
            // Stop video recording and the image-frame monitoring
            mMediaMuxer.stop();
            mMediaCodec.stop();
            // Stop the repeating recording request
            mCaptureSession.stopRepeating();
            // Detach the ImageReader listener
            mImageReader.setOnImageAvailableListener(null, null);
            // Stop the recording handler threads
            stopThreadHandlerForRecording();
            // Demo/test only
//        llUpText.setVisibility(View.GONE);
            startPreview();
        } catch (IllegalStateException | CameraAccessException e) {
            e.printStackTrace();
        } finally {
//            mMediaRecorder.reset();
//            mMediaRecorder.release();
//            mMediaRecorder = null;
            mMediaCodec.release();
            mMediaMuxer.release();
        }
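        // Note (assumption, not part of this commit): MediaMuxer.stop() throws IllegalStateException
        // if start() was never called, i.e. when no track was added because the encoder never reported
        // INFO_OUTPUT_FORMAT_CHANGED. Guarding stop() behind a "muxer started" check (for example,
        // mVideoTrackIndex >= 0) is a common fix for the exception mentioned in the class-level TODO.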
        // Close the camera preview session
@@ -623,4 +422,50 @@
        return null;
    }
    private void initPreviewThreadHandler(){
        // Preview handler thread
        previewThread = new HandlerThread("camera2Preview");
        previewThread.start();
        previewThreadHandler = new Handler(previewThread.getLooper());
    }
    private void stopPreviewThreadHandler(){
        // Preview handler thread
        try {
            previewThread.quitSafely();
            previewThread.join();
            previewThread = null;
            previewThreadHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    private void initThreadHandlerForRecording() {
        // Video-recording handler thread
        videoThread = new HandlerThread("Camera2Video");
        videoThread.start();
        videoThreadHandler = new Handler(videoThread.getLooper());
        // Image-monitoring handler thread
        imageThread = new HandlerThread("camera2Image");
        imageThread.start();
        imageThreadHandler = new Handler(imageThread.getLooper());
    }
    private void stopThreadHandlerForRecording() {
        try {
            videoThread.quitSafely();
            videoThread.join();
            videoThread = null;
            videoThreadHandler = null;
            imageThread.quitSafely();
            imageThread.join();
            imageThread = null;
            imageThreadHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}