
Developing a distortion-correction JNI library for Android with OpenCV

田谦
2023-03-14
This article introduces a way to build a distortion-correction JNI library for Android with OpenCV, along with the usage tips and caveats involved, for anyone who needs it.

The project needs a set of distortion-correction routines. Because a large amount of matrix computation is involved, and with efficiency and real-time behavior in mind, the work is done through JNI, the idea being to push all of the array and matrix processing into C/C++.

The library mainly targets Android devices. The data comes from roughly three sources: YUV frames read directly from the camera, RGB data read from third-party interfaces, and Bitmaps processed directly.

1. Taking into account the hardware interfaces, third-party software interfaces, image interfaces and the OpenCV interface, the goal is a general-purpose algorithm library that handles all of these usage scenarios once and for all, so the data formats must include YUV, ARGB and Mat.

2. Android can read the pixels of a Bitmap and create a Bitmap from a pixel array, and plenty of image-processing APIs and third-party code operate on RGB arrays, for example:

int[] pixs = new int[w * h];
bm.getPixels(pixs, 0, w, 0, 0, w, h);            // read the ARGB pixels of an existing Bitmap bm
int[] pixs1 = new int[w * h];
final Bitmap bm2 = Bitmap.createBitmap(pixs1, w, h, Bitmap.Config.ARGB_8888);  // build a Bitmap from a pixel array

The following interface is therefore designed: the input is an int array of ARGB pixels, and the output is an int array as well.

public static native boolean RgbaUndistort(int[] argb, int width, int height, int[] pixels);
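As a rough illustration of how this could be called from the Java side (a minimal sketch; bm is an existing Bitmap, and the native declaration is assumed to live in the ImageProc3 class shown in step 8 below):

int w = bm.getWidth();
int h = bm.getHeight();
int[] src = new int[w * h];
int[] dst = new int[w * h];
bm.getPixels(src, 0, w, 0, 0, w, h);                    // ARGB pixels of the source Bitmap
if (ImageProc3.RgbaUndistort(src, w, h, dst)) {         // native distortion correction
	Bitmap corrected = Bitmap.createBitmap(dst, w, h, Bitmap.Config.ARGB_8888);
}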

3. Some situations call for a two-dimensional array:

public static native boolean RgbaUndistort2(int[][] rgb, int width, int height, int[] pixels);
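A sketch of how the two-dimensional input could be prepared, reusing the src, dst, w and h variables from the snippet above (one row per image line):

int[][] rows = new int[h][w];
for (int y = 0; y < h; y++) {
	System.arraycopy(src, y * w, rows[y], 0, w);    // copy one scanline into its own row
}
boolean ok = ImageProc3.RgbaUndistort2(rows, w, h, dst);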

4. To accommodate OpenCV's Mat structure (a Mat can be turned directly into a Bitmap with matToBitmap), a Mat-output variant is provided:

public static native boolean RgbaUndistortMat(int[] argb, int width, int height, long pArgbOutMatAddr);
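Combined with the OpenCV Android SDK's Utils class, the Mat-output variant can go straight back to a Bitmap. A minimal sketch, assuming the OpenCV Java classes (org.opencv.android.Utils, org.opencv.core.Mat, org.opencv.core.CvType) are available and the library has been initialized:

Mat outMat = new Mat(h, w, CvType.CV_8UC4);
if (ImageProc3.RgbaUndistortMat(src, w, h, outMat.getNativeObjAddr())) {
	Bitmap corrected = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
	Utils.matToBitmap(outMat, corrected);           // convert the undistorted Mat to a Bitmap
}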

5. Since third-party code may already use Mat, the input can be a Mat as well, so this interface is designed:

public static native boolean RgbMatUndistortMat(long pArgbMatAddr, int width, int height, long pArgbOutMatAddr);
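When the source image already lives in a Mat (for instance produced by other OpenCV code), both sides are passed by native address. A sketch under the same assumptions as above:

Mat inMat = new Mat(h, w, CvType.CV_8UC4);          // in practice filled by other OpenCV code
Mat outMat2 = new Mat(h, w, CvType.CV_8UC4);
boolean done = ImageProc3.RgbMatUndistortMat(inMat.getNativeObjAddr(), w, h,
		outMat2.getNativeObjAddr());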

6. Since the camera outputs YUV, two YUV-processing entry points are provided: one outputs RGBA pixels, the other outputs a Mat:

public static native boolean YuvNv21UndistortRgba(byte[] YuvNv21, int width, int height, int[] pixels);
public static native boolean YuvNv21UndistortRgbaMat(byte[] YuvNv21, int width, int height, long pMatAddr);
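On the capture side, the NV21 buffer delivered by the preview callback can be fed to the undistort call directly. A minimal sketch using the old android.hardware.Camera API; previewW and previewH are assumed to match the configured preview size:

camera.setPreviewCallback(new Camera.PreviewCallback() {
	@Override
	public void onPreviewFrame(byte[] data, Camera camera) {
		// data is an NV21 frame (the default preview format)
		int[] rgba = new int[previewW * previewH];
		if (ImageProc3.YuvNv21UndistortRgba(data, previewW, previewH, rgba)) {
			Bitmap frame = Bitmap.createBitmap(rgba, previewW, previewH, Bitmap.Config.ARGB_8888);
			// display or further process the corrected frame
		}
	}
});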

7. For situations that do not need distortion correction, a grayscale and an RGBA conversion interface are provided for YUV:

public static native boolean YuvNv21ToGray(byte[] YuvNv21, int width, int height, int[] pixels);
public static native boolean YuvNv21ToRGBA(byte[] YuvNv21, int width, int height, int[] pixels);

8. Then write the simple Java class that declares the native methods:

public class ImageProc3 {
	static {
		System.loadLibrary("ImgProc3");
	}
	
	public static native boolean YuvNv21ToGray(byte[] YuvNv21, int width, int height, int[] pixels);
	public static native boolean YuvNv21ToRGBA(byte[] YuvNv21, int width, int height, int[] pixels);
	
	
	public static native boolean RgbaUndistort(int[] argb, int width, int height, int[] pixels);
	public static native boolean RgbaUndistort2(int[][] rgb, int width, int height, int[] pixels);
	public static native boolean RgbaUndistortMat(int[] argb, int width, int height, long pArgbOutMatAddr);
	public static native boolean RgbMatUndistortMat(long pArgbMatAddr, int width, int height, long pArgbOutMatAddr);
	
	public static native boolean YuvNv21UndistortRgba(byte[] YuvNv21, int width, int height, int[] pixels);
	public static native boolean YuvNv21UndistortRgbaMat(byte[] YuvNv21, int width, int height, long pMatAddr);
 
}

Go into the classes folder under the bin directory and run javah (for example javah -classpath . -jni ImgProc.ImageProc3) to generate the C header file.

Then write the C++ implementation against the generated header:

#include <stdio.h>
#include <jni.h>
#include <android/log.h>
 
 
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
 
 
using namespace std;
using namespace cv;
 
 
#define TAG  "Camera XXXXX" // custom tag used for LOG output
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,TAG,__VA_ARGS__) // LOGD logging macro
 
 
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:   ImgProc_ImageProc3
 * Method:  YuvNv21ToGray
 * Signature: ([BII[I)Z
 */
JNIEXPORT jboolean JNICALL Java_ImgProc_ImageProc3_YuvNv21ToGray
 (JNIEnv *jenv, jclass jclassz, jbyteArray YuvNv21, jint width, jint height, jintArray pixels){
 
 
	jbyte * pNV21FrameData = jenv->GetByteArrayElements(YuvNv21, 0);
	jint * poutPixels = jenv->GetIntArrayElements(pixels, 0);
 
 
	// The Y plane of the NV21 frame is already the grayscale image;
	// expand it to 4 channels so the result can back an ARGB_8888 Bitmap.
	Mat mGray(height, width, CV_8UC1, (unsigned char*) pNV21FrameData);
	Mat mRgba(height, width, CV_8UC4, (unsigned char*) poutPixels);
 
 
	cvtColor(mGray, mRgba, CV_GRAY2RGBA);
 
 
	jenv->ReleaseByteArrayElements(YuvNv21, pNV21FrameData, 0);
	jenv->ReleaseIntArrayElements(pixels, poutPixels, 0);
 
 
  return true;
}
 
 
/*
 * Class:   ImgProc_ImageProc3
 * Method:  YuvNv21ToRGBA
 * Signature: ([BII[I)Z
 */
JNIEXPORT jboolean JNICALL Java_ImgProc_ImageProc3_YuvNv21ToRGBA
 (JNIEnv *jenv, jclass jclassz, jbyteArray YuvNv21, jint width, jint height, jintArray pixels){
	jbyte * pBuf = (jbyte*) jenv->GetByteArrayElements(YuvNv21, 0);
	jint * poutPixels = jenv->GetIntArrayElements(pixels, 0);
 
 
	Mat image(height + height / 2, width, CV_8UC1, (unsigned char *) pBuf);
	Mat rgba(height, width, CV_8UC4, (unsigned char*) poutPixels);
	Mat tmp(height, width, CV_8UC4);
	cvtColor(image, tmp, CV_YUV420sp2RGBA);
 
 
	vector <Mat> channels;
	split(tmp, channels);
	Mat r = channels.at(0);
	Mat g = channels.at(1);
	Mat b = channels.at(2);
	Mat a = channels.at(3);
 
 
	vector <Mat> mbgr(4);
	mbgr[0] = b;
	mbgr[1] = g;
	mbgr[2] = r;
	mbgr[3] = a;
 
 
	merge(mbgr, rgba);
 
 
	jenv->ReleaseByteArrayElements(YuvNv21, pBuf, 0);
	jenv->ReleaseIntArrayElements(pixels, poutPixels, 0);
 
 
	return true;
}
 
 
/*
 * Class:   ImgProc_ImageProc3
 * Method:  RgbaUndistort
 * Signature: ([III[I)Z
 */
JNIEXPORT jboolean JNICALL Java_ImgProc_ImageProc3_RgbaUndistort
 (JNIEnv *jenv, jclass jclassz, jintArray argb, jint width, jint height, jintArray pixels){
	jint * poutPixels = jenv->GetIntArrayElements(pixels, 0);
	jint * pinPixels = jenv->GetIntArrayElements(argb, 0);
 
 
	Mat out(height, width, CV_8UC4, (unsigned char*) poutPixels);
	Mat in(height, width, CV_8UC4, (unsigned char*) pinPixels);
 
 
	// Placeholder intrinsics (fx = width, fy = height, cx = width/2, cy = height/2) and
	// distortion coefficients (k1, k2, p1, p2, k3); replace these with real calibration results.
	double cam[] = {width, 0, width / 2, 0, height, height / 2, 0, 0, 1 };
	double distort[] = { 0.1, 0.35, 0.0, 0.0, 0.01 };
 
 
	Mat camMat = Mat(3, 3, CV_64FC1, cam);
	Mat disMat = Mat(5, 1, CV_64FC1, distort);
	undistort(in, out, camMat, disMat);
 
 
	jenv->ReleaseIntArrayElements(argb, pinPixels, 0);
	jenv->ReleaseIntArrayElements(pixels, poutPixels, 0);
	return true;
}
 
 
/*
 * Class:   ImgProc_ImageProc3
 * Method:  RgbaUndistort2
 * Signature: ([[III[I)Z
 */
JNIEXPORT jboolean JNICALL Java_ImgProc_ImageProc3_RgbaUndistort2(JNIEnv *jenv,
		jclass jclassz, jobjectArray argb, jint width, jint height,
		jintArray pixels) {
 
 
	jint i, j;
	int row = jenv->GetArrayLength(argb);
	jintArray myarray = (jintArray)(jenv->GetObjectArrayElement(argb, 0));
	int col = jenv->GetArrayLength(myarray);
	// NOTE: this is a stack-allocated VLA; for full-resolution frames it should be heap-allocated
	jint jniData[row][col];
	LOGD("jiaXXX %s", "Java_ImgProc_ImageProc3_RgbaUndistort2");
	for (i = 0; i < row; i++) {
		myarray = (jintArray)(jenv->GetObjectArrayElement(argb, i));
		jint *coldata = jenv->GetIntArrayElements(myarray, 0);
 
 
		for (j = 0; j < col; j++) {
			jniData[i][j] = coldata[j];
			LOGD("jiaXXX %d", jniData[i][j]);
		}
 
 
		jenv->ReleaseIntArrayElements(myarray, coldata, 0);
 
 
	}
 
 
	Mat img = Mat(row, col, CV_8UC4, jniData);
	LOGD("jiaXXX %x", img.at<unsigned int>(1, 1));
 
 
	double cam[] = {width, 0, width / 2, 0, height, height / 2, 0, 0, 1 };
	double distort[] = { 0.1, 0.35, 0.0, 0.0, 0.01 };
 
 
	Mat camMat = Mat(3, 3, CV_64FC1, cam);
	Mat disMat = Mat(5, 1, CV_64FC1, distort);
 
 
	jint * poutPixels = jenv->GetIntArrayElements(pixels, 0);
	Mat out(height, width, CV_8UC4, (unsigned char*) poutPixels);
	undistort(img, out, camMat, disMat);
 
 
	jenv->ReleaseIntArrayElements(pixels, poutPixels, 0);
 
 
	return true;
}
 
 
/*
 * Class:   ImgProc_ImageProc3
 * Method:  RgbaUndistortMat
 * Signature: ([IIIJ)Z
 */
JNIEXPORT jboolean JNICALL Java_ImgProc_ImageProc3_RgbaUndistortMat
 (JNIEnv *jenv, jclass jclassz, jintArray argb, jint width, jint height, jlong pArgbOutMatAddr){
 
 
	//jint * poutPixels = jenv->GetIntArrayElements(pixels, 0);
	jint * pinPixels = jenv->GetIntArrayElements(argb, 0);
 
 
	//Mat out(height, width, CV_8UC4, (unsigned char*) poutPixels);
	Mat in(height, width, CV_8UC4, (unsigned char*) pinPixels);
	Mat out = *((Mat*)pArgbOutMatAddr);
 
 
	double cam[] = {width, 0, width / 2, 0, height, height / 2, 0, 0, 1 };
	double distort[] = { 0.1, 0.35, 0.0, 0.0, 0.01 };
 
 
	Mat camMat = Mat(3, 3, CV_64FC1, cam);
	Mat disMat = Mat(5, 1, CV_64FC1, distort);
	undistort(in, out, camMat, disMat);
 
 
	jenv->ReleaseIntArrayElements(argb, pinPixels, 0);
	//jenv->ReleaseIntArrayElements(pixels, poutPixels, 0);
 
 
	return true;
}
 
 
/*
 * Class:   ImgProc_ImageProc3
 * Method:  RgbMatUndistortMat
 * Signature: (JIIJ)Z
 */
JNIEXPORT jboolean JNICALL Java_ImgProc_ImageProc3_RgbMatUndistortMat
 (JNIEnv *jenv, jclass jclassz, jlong pArgbMatAddr, jint width, jint height, jlong pArgbOutMatAddr){
 
 
	Mat in=*((Mat*)pArgbMatAddr);
	Mat out = *((Mat*)pArgbOutMatAddr);
 
 
	double cam[] = {width, 0, width / 2, 0, height, height / 2, 0, 0, 1 };
	double distort[] = { 0.1, 0.35, 0.0, 0.0, 0.01 };
 
 
	Mat camMat = Mat(3, 3, CV_64FC1, cam);
	Mat disMat = Mat(5, 1, CV_64FC1, distort);
	undistort(in, out, camMat, disMat);
 
 
	return true;
}
 
 
/*
 * Class:   ImgProc_ImageProc3
 * Method:  YuvNv21UndistortRgba
 * Signature: ([BII[I)Z
 */
JNIEXPORT jboolean JNICALL Java_ImgProc_ImageProc3_YuvNv21UndistortRgba
 (JNIEnv *jenv, jclass jclassz, jbyteArray YuvNv21, jint width, jint height, jintArray pixels){
 
 
	jbyte * pBuf = (jbyte*) jenv->GetByteArrayElements(YuvNv21, 0);
	jint * poutPixels = jenv->GetIntArrayElements(pixels, 0);
 
 
	Mat image(height + height / 2, width, CV_8UC1, (unsigned char *) pBuf);
	Mat rgba(height, width, CV_8UC4, (unsigned char*) poutPixels);
	Mat tmp(height, width, CV_8UC4);
	cvtColor(image, tmp, CV_YUV420sp2RGBA);
 
 
	double cam[] = { width, 0, width / 2, 0, height, height / 2, 0, 0, 1 };
	double distort[] = { 0.1, 0.35, 0.0, 0.0, 0.01 };
 
 
	Mat camMat = Mat(3, 3, CV_64FC1, cam);
	Mat disMat = Mat(5, 1, CV_64FC1, distort);
	undistort(tmp, tmp, camMat, disMat);
 
 
	vector < Mat > channels;
	split(tmp, channels);
	Mat r = channels.at(0);
	Mat g = channels.at(1);
	Mat b = channels.at(2);
	Mat a = channels.at(3);
 
 
	vector < Mat > mbgr(4);
	mbgr[0] = b;
	mbgr[1] = g;
	mbgr[2] = r;
	mbgr[3] = a;
 
 
	merge(mbgr, rgba);
 
 
	jenv->ReleaseByteArrayElements(YuvNv21, pBuf, 0);
	jenv->ReleaseIntArrayElements(pixels, poutPixels, 0);
 
 
	return true;
}
 
 
/*
 * Class:   ImgProc_ImageProc3
 * Method:  YuvNv21UndistortRgbaMat
 * Signature: ([BIIJ)Z
 */
JNIEXPORT jboolean JNICALL Java_ImgProc_ImageProc3_YuvNv21UndistortRgbaMat
 (JNIEnv *jenv, jclass jclassz, jbyteArray YuvNv21, jint width, jint height, jlong pMatAddr){
 
 
	jbyte * pBuf = (jbyte*) jenv->GetByteArrayElements(YuvNv21, 0);
	//jint * poutPixels = jenv->GetIntArrayElements(pixels, 0);
 
 
	Mat image(height + height / 2, width, CV_8UC1, (unsigned char *) pBuf);
	//Mat rgba(height, width, CV_8UC4, (unsigned char*) poutPixels);
	Mat rgba = *((Mat*) pMatAddr);
	Mat tmp(height, width, CV_8UC4);
	cvtColor(image, tmp, CV_YUV420sp2RGBA);
 
 
	double cam[] = { width, 0, width / 2, 0, height, height / 2, 0, 0, 1 };
	double distort[] = { 0.1, 0.35, 0.0, 0.0, 0.01 };
 
 
	Mat camMat = Mat(3, 3, CV_64FC1, cam);
	Mat disMat = Mat(5, 1, CV_64FC1, distort);
	undistort(tmp, tmp, camMat, disMat);
 
 
	vector < Mat > channels;
	split(tmp, channels);
	Mat r = channels.at(0);
	Mat g = channels.at(1);
	Mat b = channels.at(2);
	Mat a = channels.at(3);
 
 
	vector < Mat > mbgr(4);
	mbgr[0] = b;
	mbgr[1] = g;
	mbgr[2] = r;
	mbgr[3] = a;
 
 
	merge(mbgr, rgba);
 
 
	jenv->ReleaseByteArrayElements(YuvNv21, pBuf, 0);
	//jenv->ReleaseIntArrayElements(pixels, poutPixels, 0);
 
 
	return true;
}
 
 
#ifdef __cplusplus
}
#endif

That is everything in this article on developing a distortion-correction JNI library for Android with OpenCV. I hope it gives you a useful reference, and thank you for supporting 小牛知识库.
