Android CameraX Preview and Image Analysis (YUV to Bitmap)

涂承运
2023-12-01

CameraX

  • Dependencies

    // CameraX core library
    def camerax_version = '1.0.0-beta04'
    implementation "androidx.camera:camera-core:$camerax_version"
    // CameraX Camera2 extensions
    implementation "androidx.camera:camera-camera2:$camerax_version"
    // CameraX Lifecycle library
    implementation "androidx.camera:camera-lifecycle:$camerax_version"
    // CameraX View class
    implementation 'androidx.camera:camera-view:1.0.0-alpha11'
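The ProcessCameraProvider / ListenableFuture API used later requires Java 8 language features (the original notes this in setUpCamera). A sketch of the corresponding module-level build.gradle settings, assuming the usual android block:

android {
    // CameraX requires Java 8; without this, the ListenableFuture-based
    // calls below will not compile
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
}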
  • Permissions

    • Manifest file (AndroidManifest.xml)
<!-- Enable instant app support -->
<dist:module dist:instant="true" />

<!-- Declare features -->
<uses-feature android:name="android.hardware.camera" />

<!-- Declare permissions -->
<uses-permission android:name="android.permission.CAMERA" />
  • Check whether the camera permission has been granted
private static final int PERMISSIONS_REQUEST_CODE = 10;

private boolean hasPermissions(Context context) {
    return PackageManager.PERMISSION_GRANTED
        == ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA);
}

if (!hasPermissions(this)) {
    // Permission not granted yet: request it
    ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, PERMISSIONS_REQUEST_CODE);
} else {
    // Permission already granted: initialize the camera
    initCameraX();
}
  • Permission request callback
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == PERMISSIONS_REQUEST_CODE) {
        if (grantResults.length > 0) {
            if (PackageManager.PERMISSION_GRANTED == grantResults[0]) {
                // The user granted the permission
                initCameraX();
                Toast.makeText(this, "Permission request granted", Toast.LENGTH_LONG).show();
            } else {
                Toast.makeText(this, "Permission request denied", Toast.LENGTH_LONG).show();
            }
        }
    }
}
  • Layout

    • activity_main.xml

<androidx.constraintlayout.widget.ConstraintLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    android:id="@+id/camera_container"
    android:background="@android:color/black"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <androidx.camera.view.PreviewView
        android:id="@+id/view_finder"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</androidx.constraintlayout.widget.ConstraintLayout>
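The image-analysis code later in this post displays the converted frame in an ImageView referenced as imageView, which is not declared in the layout above. A minimal addition inside the ConstraintLayout (the id image_view and its placement are my own assumptions) might be:

<!-- Hypothetical ImageView for showing the analyzed Bitmap; bind it in the
     Activity with imageView = findViewById(R.id.image_view); -->
<ImageView
    android:id="@+id/image_view"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    app:layout_constraintBottom_toBottomOf="parent"
    app:layout_constraintStart_toStartOf="parent" />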
  • Initialize CameraX

// Root container
private ConstraintLayout container;
// Preview view
private PreviewView viewFinder;
private int displayId = -1;
// Lens facing: front or back camera
private int lensFacing = CameraSelector.LENS_FACING_BACK;
private DisplayManager displayManager;
// Single-thread executor for camera work
private ExecutorService cameraExecutor;
// Listener that updates the use cases when the display rotation changes
private DisplayManager.DisplayListener displayListener = new DisplayManager.DisplayListener() {
    @Override
    public void onDisplayAdded(int displayId) {

    }

    @Override
    public void onDisplayRemoved(int displayId) {

    }

    @Override
    public void onDisplayChanged(int displayId) {
        View decorView = getWindow().getDecorView();
        if (displayId == MainActivity.this.displayId) {
            Log.d(TAG, "Rotation changed: " + decorView.getDisplay().getRotation());
            if (imageCapture != null) {
                imageCapture.setTargetRotation(decorView.getDisplay().getRotation());
            }
            if (imageAnalyzer != null) {
                imageAnalyzer.setTargetRotation(decorView.getDisplay().getRotation());
            }
        }
    }
};


private void initCameraX() {
    // Bind the views
    container = findViewById(R.id.camera_container);
    viewFinder = findViewById(R.id.view_finder);

    // Update the use cases' rotation whenever the device orientation changes
    displayManager = (DisplayManager) getSystemService(Context.DISPLAY_SERVICE);
    if (displayManager != null) {
        // Register the display listener
        displayManager.registerDisplayListener(displayListener, null);
    }

    // Initialize the camera executor
    cameraExecutor = Executors.newSingleThreadExecutor();

    viewFinder.post(() -> {
        // Keep track of the display this view is attached to
        displayId = viewFinder.getDisplay().getDisplayId();

        // Build the UI controls (not needed for this demo)
        //updateCameraUi();

        // Set up the camera
        setUpCamera();
    });
}
  • Set up the camera

private ProcessCameraProvider cameraProvider = null;

private void setUpCamera() {
    // Requires Java 8 (compilation fails without Java 8 compile options in build.gradle)
    ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(this);

    cameraProviderFuture.addListener(() -> {
        // CameraProvider
        try {
            cameraProvider = cameraProviderFuture.get();
            switch (lensFacing) {
                case CameraSelector.LENS_FACING_BACK:
                    // Check that a back camera is available
                    if (!hasBackCamera()) {
                        throw new IllegalStateException("Back camera is unavailable");
                    }
                    break;
                case CameraSelector.LENS_FACING_FRONT:
                    // Check that a front camera is available
                    if (!hasFrontCamera()) {
                        throw new IllegalStateException("Front camera is unavailable");
                    }
                    break;
                default:
                    throw new IllegalStateException("Back and front camera are unavailable");
            }
        } catch (ExecutionException | InterruptedException e) {
            e.printStackTrace();
        }

        // Enable or disable switching between cameras (not needed for this demo)
        //updateCameraSwitchButton();

        // Build and bind the camera use cases
        bindCameraUseCases();
    }, ContextCompat.getMainExecutor(this));
}

// Check whether the device has a back camera
private boolean hasBackCamera() {
    if (cameraProvider != null) {
        try {
            return cameraProvider.hasCamera(CameraSelector.DEFAULT_BACK_CAMERA);
        } catch (CameraInfoUnavailableException e) {
            e.printStackTrace();
            return false;
        }
    }
    return false;
}

// Check whether the device has a front camera
private boolean hasFrontCamera() {
    if (cameraProvider != null) {
        try {
            return cameraProvider.hasCamera(CameraSelector.DEFAULT_FRONT_CAMERA);
        } catch (CameraInfoUnavailableException e) {
            e.printStackTrace();
            return false;
        }
    }
    return false;
}
  • Build and bind the camera use cases

// Use cases bound to the camera
private Preview preview = null;
private ImageCapture imageCapture = null;
private ImageAnalysis imageAnalyzer = null;

// Camera instance
private Camera camera = null;

private static final double RATIO_4_3_VALUE = 4.0 / 3.0;
private static final double RATIO_16_9_VALUE = 16.0 / 9.0;

private void bindCameraUseCases() {
    DisplayMetrics metrics = new DisplayMetrics();
    viewFinder.getDisplay().getRealMetrics(metrics);

    // Aspect ratio
    int screenAspectRatio = aspectRatio(metrics.widthPixels, metrics.heightPixels);

    // Rotation
    int rotation = viewFinder.getDisplay().getRotation();

    if (cameraProvider == null) {
        throw new IllegalStateException("Camera initialization failed.");
    }

    CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(lensFacing).build();
    // Preview use case
    preview = new Preview.Builder()
        // Aspect ratio
        .setTargetAspectRatio(screenAspectRatio)
        // Rotation
        .setTargetRotation(rotation)
        .build();

    // Image capture use case
    imageCapture = new ImageCapture.Builder()
        .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
        // Aspect ratio
        .setTargetAspectRatio(screenAspectRatio)
        // Rotation
        .setTargetRotation(rotation)
        .build();

    // Image analysis use case
    imageAnalyzer = new ImageAnalysis.Builder()
        .setTargetAspectRatio(screenAspectRatio)
        .setTargetRotation(rotation)
        .build();

    // Unbind any previously bound use cases before rebinding
    cameraProvider.unbindAll();

    try {
        // Bind the use cases to the lifecycle
        camera = cameraProvider.bindToLifecycle(
            this, cameraSelector, preview, imageCapture, imageAnalyzer);

        // Attach the preview output to the viewFinder
        if (preview != null) {
            preview.setSurfaceProvider(viewFinder.createSurfaceProvider());
        }

    } catch (Exception exc) {
        Log.e(TAG, "Use case binding failed", exc);
    }
}

// Choose the standard aspect ratio (4:3 or 16:9) closest to the display's own ratio
private int aspectRatio(int width, int height) {
    int max = Math.max(width, height);
    int min = Math.min(width, height);
    double previewRatio = (double) max / (double) min;
    if (Math.abs(previewRatio - RATIO_4_3_VALUE) <= Math.abs(previewRatio - RATIO_16_9_VALUE)) {
        return AspectRatio.RATIO_4_3;
    }
    return AspectRatio.RATIO_16_9;
}
  • Tear down

@Override
protected void onDestroy() {
    super.onDestroy();
    cameraExecutor.shutdown();
    displayManager.unregisterDisplayListener(displayListener);
}

The code above implements a simple camera preview.


Image analysis (YUV to Bitmap)

ImageAnalysis#setAnalyzer

  • Official approach (YuvImage + JPEG)

// Where this code goes (inside bindCameraUseCases)
private void bindCameraUseCases() {
    //...
    // Image analysis use case
    imageAnalyzer = new ImageAnalysis.Builder()
        .setTargetAspectRatio(screenAspectRatio)
        .setTargetRotation(rotation)
        .build();

    // Register the analyzer; it is invoked once per frame on cameraExecutor
    imageAnalyzer.setAnalyzer(cameraExecutor, image -> {
        ImageProxy.PlaneProxy[] planes = image.getPlanes();
        new Thread(() -> {

            for (int i = 0; i < planes.length; i++) {
                Log.i(TAG, "pixelStride  " + planes[i].getPixelStride());
                Log.i(TAG, "rowStride   " + planes[i].getRowStride());
                Log.i(TAG, "width  " + image.getWidth());
                Log.i(TAG, "height  " + image.getHeight());
                Log.i(TAG, "Finished reading data from plane  " + i);
            }

            // CameraX delivers YUV_420_888; grab the three planes
            ByteBuffer yBuffer = planes[0].getBuffer();
            ByteBuffer uBuffer = planes[1].getBuffer();
            ByteBuffer vBuffer = planes[2].getBuffer();

            int ySize = yBuffer.remaining();
            int uSize = uBuffer.remaining();
            int vSize = vBuffer.remaining();

            byte[] nv21 = new byte[ySize + uSize + vSize];

            // Pack into NV21 (Y, then interleaved VU); this relies on the U/V
            // planes being tightly packed (pixelStride == 2, no row padding)
            yBuffer.get(nv21, 0, ySize);
            vBuffer.get(nv21, ySize, vSize);
            uBuffer.get(nv21, ySize + vSize, uSize);

            // Start time
            long START = System.currentTimeMillis();
            // Wrap the NV21 bytes in a YuvImage
            YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, image.getWidth(), image.getHeight(), null);
            // Output stream
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            // Compress to JPEG (quality 50) and write into out
            yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 50, out);
            // Convert to a byte array
            byte[] imageBytes = out.toByteArray();
            // Decode the JPEG into a Bitmap
            Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
            // Rotate the bitmap to match the display orientation
            Bitmap rotateBitmap = rotateBitmap(bitmap, 90);
            // End time
            long END = System.currentTimeMillis();
            runOnUiThread(() -> {
                // imageView is an ImageView used to show the result (see the layout note above)
                imageView.setImageBitmap(rotateBitmap);
                Log.e(TAG, "Elapsed: " + (END - START));
                // Close the frame so the analyzer can receive the next one
                image.close();
            });
        }).start();
    });

    //...
}

 private Bitmap rotateBitmap(Bitmap origin, float alpha) {
     if (origin == null) {
         return null;
     }
     int width = origin.getWidth();
     int height = origin.getHeight();
     Matrix matrix = new Matrix();
     matrix.setRotate(alpha);
     // Create a new bitmap with the rotation applied
     Bitmap newBM = Bitmap.createBitmap(origin, 0, 0, width, height, matrix, false);
     if (newBM.equals(origin)) {
         return newBM;
     }
     origin.recycle();
     return newBM;
 }
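Note that the simple buffer concatenation above only produces valid NV21 when the planes are tightly packed (rowStride equal to the width, U/V pixelStride of 2), which is what the stride logging is meant to verify. A more defensive sketch of my own (not from the original sources) that copies the Y plane row by row and interleaves V/U explicitly might look like:

// Sketch: YUV_420_888 (ImageProxy) -> NV21, honouring rowStride/pixelStride.
// Assumes the U and V planes share the same rowStride and pixelStride,
// which is the usual case for camera output.
private static byte[] yuv420ToNv21(ImageProxy image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] nv21 = new byte[width * height * 3 / 2];

    ImageProxy.PlaneProxy yPlane = image.getPlanes()[0];
    ImageProxy.PlaneProxy uPlane = image.getPlanes()[1];
    ImageProxy.PlaneProxy vPlane = image.getPlanes()[2];

    // Copy Y row by row, skipping any row padding
    ByteBuffer yBuffer = yPlane.getBuffer();
    int yRowStride = yPlane.getRowStride();
    int pos = 0;
    for (int row = 0; row < height; row++) {
        yBuffer.position(row * yRowStride);
        yBuffer.get(nv21, pos, width);
        pos += width;
    }

    // Interleave V and U (NV21 expects VU order) one sample at a time
    ByteBuffer uBuffer = uPlane.getBuffer();
    ByteBuffer vBuffer = vPlane.getBuffer();
    int uvRowStride = uPlane.getRowStride();
    int uvPixelStride = uPlane.getPixelStride();
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            int uvIndex = row * uvRowStride + col * uvPixelStride;
            nv21[pos++] = vBuffer.get(uvIndex);
            nv21[pos++] = uBuffer.get(uvIndex);
        }
    }
    return nv21;
}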
  • Intel (pure Java NV21/NV12 decoder)


/**
 * Converts a semi-planar (NV21/NV12) YUV byte array to a Bitmap.
 *
 * @param data YUV byte array
 * @param w    width
 * @param h    height
 * @param uOff 1 for NV21; 0 for NV12
 * @param vOff 0 for NV21; 1 for NV12
 * @return the decoded Bitmap
 */
private static Bitmap spToBitmap(byte[] data, int w, int h, int uOff, int vOff) {
    int plane = w * h;
    int[] colors = new int[plane];
    int yPos = 0, uvPos = plane;
    for(int j = 0; j < h; j++) {
        for(int i = 0; i < w; i++) {
            // YUV byte to RGB int
            final int y1 = data[yPos] & 0xff;
            final int u = (data[uvPos + uOff] & 0xff) - 128;
            final int v = (data[uvPos + vOff] & 0xff) - 128;
            final int y1192 = 1192 * y1;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);

            r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r);
            g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g);
            b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b);
            colors[yPos] = ((r << 6) & 0xff0000) |
                ((g >> 2) & 0xff00) |
                ((b >> 10) & 0xff);

            // Advance the UV index every second pixel (4:2:0 chroma subsampling)
            if((yPos++ & 1) == 1) uvPos += 2;
        }
        // Every pair of image rows shares one UV row, so rewind after even rows
        if((j & 1) == 0) uvPos -= w;
    }
    return Bitmap.createBitmap(colors, w, h, Bitmap.Config.RGB_565);
}
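For reference, calling it from the analyzer above might look like the following sketch (nv21 is the array assembled earlier; the result still needs the same 90-degree rotation, and imageView is the hypothetical view mentioned in the layout section):

// Hypothetical call site inside the analyzer thread; 1/0 selects NV21 ordering
Bitmap bitmap = spToBitmap(nv21, image.getWidth(), image.getHeight(), 1, 0);
Bitmap rotated = rotateBitmap(bitmap, 90);
runOnUiThread(() -> imageView.setImageBitmap(rotated));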
  • C (JNI) — I forget whose code this is; I copied it yesterday and can't find the source again today.

JNIEXPORT jintArray JNICALL
Java_com_weiou_cameraxdemo_MainActivity_decodeYUV420SP(JNIEnv *env, jobject thiz, jbyteArray buf,
                                                       jint width, jint height) {
    jbyte *yuv420sp = (*env)->GetByteArrayElements(env, buf, 0);
    int frameSize = width * height;
    // Output pixel values; heap-allocated (needs <stdlib.h>) because a stack
    // array of width * height ints would overflow for camera-sized frames
    jint *rgb = (jint *) malloc(sizeof(jint) * (size_t) frameSize);

    int i = 0, j = 0, yp = 0;
    int uvp = 0, u = 0, v = 0;
    for (j = 0, yp = 0; j < height; j++) {
        uvp = frameSize + (j >> 1) * width;
        u = 0;
        v = 0;
        for (i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0)
                y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }

            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);

            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;

            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }

    jintArray result = (*env)->NewIntArray(env, frameSize);
    (*env)->SetIntArrayRegion(env, result, 0, frameSize, rgb);
    (*env)->ReleaseByteArrayElements(env, buf, yuv420sp, 0);
    free(rgb);
    return result;
}

Remember to change the Java_com_weiou_cameraxdemo_MainActivity prefix to match your own package and class name.
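On the Java side, a sketch based on the JNI signature above (the library name camerax-demo-lib is an assumption; use whatever your CMake/NDK target is called):

// Hypothetical Java-side declaration and use of the native decoder, in MainActivity
static {
    System.loadLibrary("camerax-demo-lib"); // assumed native library name
}

public native int[] decodeYUV420SP(byte[] buf, int width, int height);

// Inside the analyzer, after assembling the nv21 array:
int[] pixels = decodeYUV420SP(nv21, image.getWidth(), image.getHeight());
Bitmap bitmap = Bitmap.createBitmap(pixels, image.getWidth(), image.getHeight(), Bitmap.Config.ARGB_8888);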
