Background: an RK3399 Pro development board (running Android) recently landed on my desk, with the camera wired straight into a USB port. Seeing "RK3399" I assumed Tengine would just work and wanted to give it a try, but after talking with 虫叔 I learned that the plain RK3399 has no NPU at all, and even the RK3399 Pro's NPU can only be reached from Android through the vendor's official API/SDK (the customer is in a hurry, so the plan is to ship a demo first and ask the vendor for the Android NPU SDK later; a real pitfall). I also happened to notice that another team's Android system runs on an RK3399 as well, so for now the sensible move is a straightforward port of ncnn + nanodet.
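With the NPU out of reach, inference has to run on the RK3399's CPU (2× Cortex-A72 + 4× Cortex-A53) through ncnn. The original post does not show this, but as a rough, hedged reference, this is how ncnn is commonly pinned to the big cores on such a board (the function name is mine; the option fields are standard ncnn):

// Minimal sketch, not from the original post: typical ncnn CPU settings for an RK3399.
#include "net.h"   // ncnn::Net
#include "cpu.h"   // ncnn::set_cpu_powersave

static ncnn::Net nanodet;

static void configure_ncnn_for_rk3399() {
    ncnn::set_cpu_powersave(2);             // 2 = bind worker threads to the two big A72 cores
    nanodet.opt.num_threads = 2;            // one thread per big core
    nanodet.opt.use_vulkan_compute = false; // plain CPU path; the JNI code later enables Vulkan on the Mali GPU instead
    // nanodet.opt.use_bf16_storage = true; // optional, the same switch the post leaves commented out
}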
(1) My first thought was to simply reuse nihui's code (GitHub - nihui/ncnn-android-nanodet) for frame capture, but it would not run on this RK3399. My understanding is that that sample grabs NV21-encoded frames straight from the on-board camera, whereas here the USB camera hangs directly off the board: frames have to come through the UVC driver, which detects the /dev node, queries the USB interface information, and then reads frames from it.

(2) Searching GitHub turned up two kinds of sources: boards that take an external camera through an OTG-to-USB adapter, and boards with the USB camera wired in directly. Then came the painful part: working through saki4510t's code (GitHub - saki4510t/UVCCamera: library and sample to access to UVC web camera on non-rooted Android device), which cost me a whole week (I really am that slow), and then modifying GitHub - o0olele/UVCcamera-Opencv (a simple example using opencv in UVCcamera). Because neither project is built around a C++ JNI setup, I created a new project, dropped the unused packages and reworked the rest (GitHub - allencheung2018/SwitchUsbCameraAndroid, usage of a USB camera with customizable grids and lines during preview, is also quite good, but I couldn't bend it to my needs). In the rework I removed the original author's static OpenCV package and imported the OpenCV dynamic library instead (the static one would have worked; I just can't sleep unless I've built the working thing myself), pulled in the two AAR packages, and adapted my earlier write-up "6、Android配置opencv4.5及C++ Native Yolo4.0模型检测_sxj731533730-CSDN博客", since this project sits on top of native JNI C++. [The photo below shows the RK3399 board.] The result is a working demo: Android + RK3399 + a USB camera wired straight to the board (its sensor is driven by an FPGA); an ordinary USB camera works as well.

(3) Modified code snippets.

Gradle fragment:

repositories {
    flatDir {
        dirs 'libs'
    }
}

dependencies {
    implementation fileTree(include: ['*.jar'], dir: 'libs')
    implementation 'junit:junit:4.12'
    //noinspection GradleCompatible
    implementation 'com.android.support:appcompat-v7:28.0.0'
    implementation project(':OpenCVLib4.4.0')
    implementation 'com.android.support.constraint:constraint-layout:1.1.3'
    implementation(name: 'libuvccamera', ext: 'aar')
    implementation(name: 'usbCameraCommon', ext: 'aar')
    implementation("com.serenegiant:common:1.5.20") {
        exclude module: 'support-v4'
    }
}
MainActivity fragment:

package com.sxj731533730.rk3399;

import android.content.res.AssetManager;
import android.graphics.Bitmap;
import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import android.graphics.SurfaceTexture;
import android.hardware.usb.UsbDevice;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.Toast;
import com.serenegiant.common.BaseActivity;
import com.serenegiant.usb.CameraDialog;
import com.serenegiant.usb.IFrameCallback;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.USBMonitor.OnDeviceConnectListener;
import com.serenegiant.usb.USBMonitor.UsbControlBlock;
import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usbcameracommon.UVCCameraHandler;
import com.serenegiant.widget.CameraViewInterface;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.imgproc.Imgproc;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import static org.opencv.imgcodecs.Imgcodecs.imread;
import static org.opencv.imgcodecs.Imgcodecs.imwrite;
import static org.opencv.imgproc.Imgproc.FONT_HERSHEY_SIMPLEX;
import static org.opencv.imgproc.Imgproc.getTextSize;
import static org.opencv.imgproc.Imgproc.putText;
import static org.opencv.imgproc.Imgproc.rectangle;

public final class MainActivity extends BaseActivity implements CameraDialog.CameraDialogParent {

    static {
        System.loadLibrary("native-lib");
    }

    private static final boolean DEBUG = true;    // TODO set false on release
    private static final String TAG = "MainActivity";

    /**
     * lock object
     */
    private final Object mSync = new Object();
    /**
     * set true if you want to record movie using MediaSurfaceEncoder
     * (writing frame data into Surface camera from MediaCodec
     * by almost same way as USBCameratest2)
     * set false if you want to record movie using MediaVideoEncoder
     */
    private static final boolean USE_SURFACE_ENCODER = false;
    /**
     * preview resolution(width)
     * if your camera does not support specific resolution and mode,
     * {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
     */
    private static final int PREVIEW_WIDTH = 1280;
    /**
     * preview resolution(height)
     * if your camera does not support specific resolution and mode,
     * {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
     */
    private static final int PREVIEW_HEIGHT = 720;
    /**
     * preview mode
     * if your camera does not support specific resolution and mode,
     * {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
     * 0:YUYV, other:MJPEG
     */
    private static final int PREVIEW_MODE = 0; // YUV

    protected static final int SETTINGS_HIDE_DELAY_MS = 2500;

    /**
     * for accessing USB
     */
    private USBMonitor mUSBMonitor;
    /**
     * Handler to execute camera related methods sequentially on private thread
     */
    private UVCCameraHandler mCameraHandler;
    /**
     * for camera preview display
     */
    private CameraViewInterface mUVCCameraView;
    /**
     * for open&start / stop&close camera preview
     */
    private ImageButton mCameraButton;
    private ImageView mImageView;

    private boolean isScaling = false;
    private boolean isInCapturing = false;

    private int[][] capture_solution = {{640, 480}, {800, 600}, {1024, 768}, {1280, 1024}};
    private int mCaptureWidth = capture_solution[0][0];
    private int mCaptureHeight = capture_solution[0][1];

    /**
     * OpenCV loader helper: it declares a callback that runs once the OpenCV
     * manager (or the bundled library) is connected, so that OpenCV gets
     * initialised at the right moment.
     */
    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV loaded successfully");
                }
                break;
                default: {
                    super.onManagerConnected(status);
                }
                break;
            }
        }
    };

    private String[] readAsset() {
        AssetManager am = this.getAssets();
        String[] pathStr = null;
        try {
            // list the files under assets/ (this is where the model files are stored)
            pathStr = am.list("");
        } catch (IOException e) {
            e.printStackTrace();
        }
        return pathStr;
    }

    private String getFileExtension(String filename) {
        if ((filename != null) && (filename.length() > 0)) {
            String string = filename.trim();
            int index = filename.lastIndexOf(".");
            if (index > 0 && index < string.length() - 1) {
                return string.substring(index + 1);
            }
        }
        return filename;
    }

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.v(TAG, "onCreate:");

        // pick the .param / .bin model files out of assets/
        String paramFile = null, binFile = null;
        String[] DirFile = readAsset();
        for (int i = 0; i < DirFile.length; i++) {
            if (getFileExtension(DirFile[i]).equals("param"))
                paramFile = DirFile[i];
            else if (getFileExtension(DirFile[i]).equals("bin"))
                binFile = DirFile[i];
        }
        String[] labelStringName = {"towerhead", "insulator"};
        int targetSize = 320;
        float probThreshold = 0.3f, nmsThreshold = 0.3f;
        initModel(this.getAssets(), paramFile, binFile, labelStringName, targetSize, probThreshold, nmsThreshold); // initialise the model

        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_main);
        mCameraButton = findViewById(R.id.imageButton);
        mCameraButton.setOnClickListener(mOnClickListener);
        mImageView = (ImageView) findViewById(R.id.imageView);

        mCaptureWidth = capture_solution[0][0];
        mCaptureHeight = capture_solution[0][1];
        bitmap = Bitmap.createBitmap(mCaptureWidth, mCaptureHeight, Bitmap.Config.RGB_565);

        final View view = findViewById(R.id.camera_view);
        mUVCCameraView = (CameraViewInterface) view;
        mUVCCameraView.setAspectRatio(PREVIEW_WIDTH / (float) PREVIEW_HEIGHT);

        synchronized (mSync) {
            mUSBMonitor = new USBMonitor(this, mOnDeviceConnectListener);
            mCameraHandler = UVCCameraHandler.createHandler(this, mUVCCameraView,
                    USE_SURFACE_ENCODER ? 0 : 1, PREVIEW_WIDTH, PREVIEW_HEIGHT, PREVIEW_MODE);
        }
    }

    @Override
    protected void onStart() {
        super.onStart();
        Log.v(TAG, "onStart:");
        if (!OpenCVLoader.initDebug()) {
            Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
        synchronized (mSync) {
            mUSBMonitor.register();
        }
        if (mUVCCameraView != null) {
            mUVCCameraView.onResume();
        }
    }

    @Override
    protected void onStop() {
        Log.v(TAG, "onStop:");
        synchronized (mSync) {
            mCameraHandler.close(); // #close includes #stopRecording and #stopPreview
            mUSBMonitor.unregister();
        }
        if (mUVCCameraView != null)
            mUVCCameraView.onPause();
        super.onStop();
    }

    @Override
    public void onDestroy() {
        Log.v(TAG, "onDestroy:");
        synchronized (mSync) {
            if (mCameraHandler != null) {
                mCameraHandler.setPreviewCallback(null); // zhf
                mCameraHandler.release();
                mCameraHandler = null;
            }
            if (mUSBMonitor != null) {
                mUSBMonitor.destroy();
                mUSBMonitor = null;
            }
        }
        super.onDestroy();
    }

    /**
     * event handler when click camera / capture button
     */
    private final OnClickListener mOnClickListener = new OnClickListener() {
        @Override
        public void onClick(final View view) {
            synchronized (mSync) {
                if ((mCameraHandler != null) && !mCameraHandler.isOpened()) {
                    CameraDialog.showDialog(MainActivity.this);
                } else {
                    mCameraHandler.close();
                }
            }
        }
    };

    private void startPreview() {
        synchronized (mSync) {
            if (mCameraHandler != null) {
                final SurfaceTexture st = mUVCCameraView.getSurfaceTexture();
                /**
                 * The SurfaceView lives on another thread, so the frame callback
                 * is delivered through the handler's message mechanism.
                 */
                mCameraHandler.setPreviewCallback(mIFrameCallback);
                mCameraHandler.startPreview(new Surface(st));
            }
        }
        updateItems();
    }

    private final OnDeviceConnectListener mOnDeviceConnectListener = new OnDeviceConnectListener() {
        @Override
        public void onAttach(final UsbDevice device) {
            Toast.makeText(MainActivity.this, "USB_DEVICE_ATTACHED", Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onConnect(final UsbDevice device, final UsbControlBlock ctrlBlock, final boolean createNew) {
            if (DEBUG) Log.v(TAG, "onConnect:");
            synchronized (mSync) {
                if (mCameraHandler != null) {
                    mCameraHandler.open(ctrlBlock);
                    startPreview();
                    updateItems();
                }
            }
        }

        @Override
        public void onDisconnect(final UsbDevice device, final UsbControlBlock ctrlBlock) {
            if (DEBUG) Log.v(TAG, "onDisconnect:");
            synchronized (mSync) {
                if (mCameraHandler != null) {
                    queueEvent(new Runnable() {
                        @Override
                        public void run() {
                            try {
                                // may throw java.lang.IllegalStateException: already released
                                mCameraHandler.setPreviewCallback(null); // zhf
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                            mCameraHandler.close();
                        }
                    }, 0);
                }
            }
        }
        @Override
        public void onDettach(final UsbDevice device) {
            Toast.makeText(MainActivity.this, "USB_DEVICE_DETACHED", Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onCancel(final UsbDevice device) {
        }
    };

    /**
     * to access from CameraDialog
     * @return
     */
    @Override
    public USBMonitor getUSBMonitor() {
        synchronized (mSync) {
            return mUSBMonitor;
        }
    }

    @Override
    public void onDialogResult(boolean canceled) {
        if (DEBUG) Log.v(TAG, "onDialogResult:canceled=" + canceled);
    }

    //================================================================================
    private boolean isActive() {
        return mCameraHandler != null && mCameraHandler.isOpened();
    }

    private boolean checkSupportFlag(final int flag) {
        return mCameraHandler != null && mCameraHandler.checkSupportFlag(flag);
    }

    private int getValue(final int flag) {
        return mCameraHandler != null ? mCameraHandler.getValue(flag) : 0;
    }

    private int setValue(final int flag, final int value) {
        return mCameraHandler != null ? mCameraHandler.setValue(flag, value) : 0;
    }

    private int resetValue(final int flag) {
        return mCameraHandler != null ? mCameraHandler.resetValue(flag) : 0;
    }

    /**
     * Wrap the UI-update code in a Runnable and hand it to
     * Activity.runOnUiThread(Runnable) whenever the UI needs refreshing.
     */
    private void updateItems() {
        runOnUiThread(mUpdateItemsOnUITask, 100);
    }

    private final Runnable mUpdateItemsOnUITask = new Runnable() {
        @Override
        public void run() {
            if (isFinishing()) return;
            final int visible_active = isActive() ? View.VISIBLE : View.INVISIBLE;
            mImageView.setVisibility(visible_active);
        }
    };

    // if you need frame data as byte array on Java side, you can use this callback method with UVCCamera#setFrameCallback
    // if you need to create Bitmap in IFrameCallback, please refer following snippet.
    private Bitmap bitmap = null; // Bitmap.createBitmap(640, 480, Bitmap.Config.RGB_565);
    private final Bitmap srcBitmap = Bitmap.createBitmap(PREVIEW_WIDTH, PREVIEW_HEIGHT, Bitmap.Config.RGB_565);
    private String WarnText;

    private final IFrameCallback mIFrameCallback = new IFrameCallback() {
        @Override
        public void onFrame(final ByteBuffer frame) {
            frame.clear();
            if (!isActive() || isInCapturing) {
                return;
            }
            if (bitmap == null) {
                Toast.makeText(MainActivity.this, "错误:Bitmap为空", Toast.LENGTH_SHORT).show();
                return;
            }
            /**
             * The OpenCV processing happens here.
             * srcBitmap: source frame; bitmap: processed result
             */
            synchronized (bitmap) {
                srcBitmap.copyPixelsFromBuffer(frame);
                WarnText = "";
                if (bitmap.getWidth() != mCaptureWidth || bitmap.getHeight() != mCaptureHeight) {
                    bitmap = Bitmap.createBitmap(mCaptureWidth, mCaptureHeight, Bitmap.Config.RGB_565);
                }
                Mat rgbMat = new Mat();
                bitmap = Bitmap.createBitmap(srcBitmap.getWidth(), srcBitmap.getHeight(), Bitmap.Config.RGB_565);
                Utils.bitmapToMat(srcBitmap, rgbMat); // convert original bitmap to Mat, R G B.
                //imwrite("/sdcard/mat.jpg", rgbMat);
                Utils.matToBitmap(rgbMat, bitmap);    // convert mat to bitmap
                Mat mat = new Mat();
                Imgproc.cvtColor(rgbMat, mat, Imgproc.COLOR_BGR2RGB);
                // create an empty bitmap and copy the Mat into it
                Bitmap processBitmap = Bitmap.createBitmap(mat.width(), mat.height(), Bitmap.Config.ARGB_8888);
                Utils.matToBitmap(mat, processBitmap);
                //saveImage(bitmap);

                long startTime = System.currentTimeMillis(); // start time
                // "InfoItem" is the assumed element type of the returned list (inferred from the getters used below)
                List<InfoItem> infoItem = detectFrame(processBitmap);
                long endTime = System.currentTimeMillis();   // end time
                long runTime = endTime - startTime;
                int fps = (int) (1.f / runTime * 1000);
                Log.i("test", String.format("方法使用时间 %d ms", runTime));
                Log.i("test", "帧率 " + fps);

                Utils.bitmapToMat(srcBitmap, rgbMat); // convert original bitmap to Mat, R G B.
Log.i(TAG, "infoItem.size()= "+infoItem.size()); for (int i=0;i float minX = object.getxMin(); float minY = object.getyMin(); float width = object.getwidth(); float height = object.getheight(); float conf= object.getConfidence(); int labelId = object.getLabelId(); String labelName = object.getLabelName(); Log.i(TAG, "labelName= "+labelName); int[] baseLine = new int[1]; baseLine[0]=0; Size label_size = getTextSize(String.valueOf(conf)+"%", FONT_HERSHEY_SIMPLEX, 1, 1, baseLine); int x = (int)minX; int y = (int) ((int)minY - label_size.height - baseLine[0]); if (y < 0) y = 0; if (x + label_size.width > rgbMat.cols()) x = (int) (rgbMat.cols() - label_size.width); putText(rgbMat, String.valueOf(conf)+"%", new Point(x, y + label_size.height),FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0)); putText(rgbMat, labelName, new Point(x,y+1.8*label_size.height), FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0)); rectangle(rgbMat, new Point(minX,minY), new Point(minX+width, minY+height), new Scalar(255, 0, 0), 1, 8); } Utils.matToBitmap(rgbMat, bitmap); //convert mat to bitmap } mImageView.post(mUpdateImageTask); } }; private static void saveImage(Bitmap bmp) { File appDir = new File(Environment.getExternalStorageDirectory(), ""); if (!appDir.exists()) { appDir.mkdir(); } String fileName = System.currentTimeMillis() + ".jpg"; File file = new File(appDir, fileName); try { FileOutputStream fos = new FileOutputStream(file); bmp.compress(Bitmap.CompressFormat.JPEG, 100, fos); fos.flush(); fos.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } private final Runnable mUpdateImageTask = new Runnable() { @Override public void run() { synchronized (bitmap) { mImageView.setImageBitmap(bitmap); } } }; public native void initModel(AssetManager manager, String modelConfiguration, String modelBinary, String[] labelStringName, int targetSize, float probThreshold, float nmsThreshold); public native List } 然后就可以愉快的写JNI模块了~ (4) 然后是移植一下检测代码;主要自己构建了一个cv工程,然后引入了两个arr包,修改了帧处理接口和处理接口 这里也简单放一下代码片段 JNI extern "C" JNIEXPORT void JNICALL Java_com_sxj731533730_rk3399_MainActivity_initModel(JNIEnv *env, jobject thiz, jobject manager, jstring model_configuration, jstring model_binary, jobjectArray label_string_name, jint targetsize, jfloat probthreshold, jfloat nmsthreshold) { // TODO: implement initModel() AAssetManager* mgr = AAssetManager_fromJava(env, manager); // 类别信息 class_names.clear(); vector jsize size = env->GetArrayLength(label_string_name); for (int i = 0; i < size; i++) { jstring obj = (jstring) env->GetObjectArrayElement(label_string_name, i); string sstr = (string) env->GetStringUTFChars(obj, NULL);//得到字符串 class_names.push_back(sstr); } target_size=targetsize; prob_threshold = probthreshold; nms_threshold = nmsthreshold; //取模型配置文件 const char *ptr_modelConfiguration = NULL; if (model_configuration) { ptr_modelConfiguration = (env)->GetStringUTFChars(model_configuration, 0); } //取模型文件 const char *ptr_modelBinary = NULL; if (model_binary) { ptr_modelBinary = (env)->GetStringUTFChars(model_binary, 0); } nanodet.opt.use_vulkan_compute = true; // nanodet.opt.use_bf16_storage = true; nanodet.load_param(mgr, ptr_modelConfiguration); nanodet.load_model(mgr,ptr_modelBinary); } extern "C" JNIEXPORT jobject JNICALL Java_com_sxj731533730_rk3399_MainActivity_detectFrame(JNIEnv *env, jobject thiz, jobject bitmap) { AndroidBitmapInfo inBmpInfo; void *inPixelsAddress; int ret; if ((ret = AndroidBitmap_getInfo(env, bitmap, &inBmpInfo)) < 0) { return NULL; } if ((ret = 
JNI fragment (initModel and detectFrame):

extern "C" JNIEXPORT void JNICALL
Java_com_sxj731533730_rk3399_MainActivity_initModel(JNIEnv *env, jobject thiz, jobject manager,
                                                    jstring model_configuration, jstring model_binary,
                                                    jobjectArray label_string_name, jint targetsize,
                                                    jfloat probthreshold, jfloat nmsthreshold) {
    // TODO: implement initModel()
    AAssetManager *mgr = AAssetManager_fromJava(env, manager);

    // class label names
    class_names.clear();
    jsize size = env->GetArrayLength(label_string_name);
    for (int i = 0; i < size; i++) {
        jstring obj = (jstring) env->GetObjectArrayElement(label_string_name, i);
        string sstr = (string) env->GetStringUTFChars(obj, NULL); // get the label string
        class_names.push_back(sstr);
    }
    target_size = targetsize;
    prob_threshold = probthreshold;
    nms_threshold = nmsthreshold;

    // the model configuration (.param) file name
    const char *ptr_modelConfiguration = NULL;
    if (model_configuration) {
        ptr_modelConfiguration = (env)->GetStringUTFChars(model_configuration, 0);
    }
    // the model weights (.bin) file name
    const char *ptr_modelBinary = NULL;
    if (model_binary) {
        ptr_modelBinary = (env)->GetStringUTFChars(model_binary, 0);
    }

    nanodet.opt.use_vulkan_compute = true;
    // nanodet.opt.use_bf16_storage = true;
    nanodet.load_param(mgr, ptr_modelConfiguration);
    nanodet.load_model(mgr, ptr_modelBinary);
}

extern "C" JNIEXPORT jobject JNICALL
Java_com_sxj731533730_rk3399_MainActivity_detectFrame(JNIEnv *env, jobject thiz, jobject bitmap) {
    AndroidBitmapInfo inBmpInfo;
    void *inPixelsAddress;
    int ret;
    if ((ret = AndroidBitmap_getInfo(env, bitmap, &inBmpInfo)) < 0) {
        return NULL;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &inPixelsAddress)) < 0) {
        return NULL;
    }
    Mat rgbaImage;
    rgbaImage.create(inBmpInfo.height, inBmpInfo.width, CV_8UC4);
    Mat inMat(inBmpInfo.height, inBmpInfo.width, CV_8UC4, inPixelsAddress);
    cvtColor(inMat, rgbaImage, COLOR_mRGBA2RGBA);
    Mat frame;
    frame.create(inBmpInfo.height, inBmpInfo.width, CV_8UC3);
    cvtColor(rgbaImage, frame, COLOR_RGBA2BGR);
    //cv::imwrite("a.jpg", rgbaImage);
    std::vector
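The forum copy cuts the listing off at that std::vector declaration. Purely as a hedged sketch (not the author's code) of how a detectFrame body of this shape usually continues: run the NanoDet decode on frame, unlock the bitmap, and marshal the detections into the java.util.ArrayList that MainActivity consumes. The detect_nanodet() helper, the Object struct, the InfoItem class path and its constructor signature are all assumptions inferred from the Java snippet above.

// --- sketch only: everything below is reconstructed, not from the original post ---
// (these two declarations would live at file scope, above the exported functions)
struct Object { float x, y, w, h, prob; int label; };

// Assumed helper: letterbox-resize `bgr` to target_size, run the ncnn extractor on
// `nanodet`, decode the detection heads and apply NMS with prob_threshold /
// nms_threshold, along the lines of nihui's ncnn-android-nanodet sample.
void detect_nanodet(const cv::Mat &bgr, std::vector<Object> &objects);

    // ...continuing from where the listing above breaks off:
    std::vector<Object> objects;
    detect_nanodet(frame, objects);
    AndroidBitmap_unlockPixels(env, bitmap);

    // Hand the detections back to Java as an ArrayList of InfoItem objects.
    jclass listCls = env->FindClass("java/util/ArrayList");
    jmethodID listCtor = env->GetMethodID(listCls, "<init>", "()V");
    jmethodID listAdd = env->GetMethodID(listCls, "add", "(Ljava/lang/Object;)Z");
    jobject result = env->NewObject(listCls, listCtor);

    // Assumed POJO: InfoItem(float xMin, float yMin, float width, float height,
    //                        float confidence, int labelId, String labelName)
    jclass itemCls = env->FindClass("com/sxj731533730/rk3399/InfoItem");
    jmethodID itemCtor = env->GetMethodID(itemCls, "<init>", "(FFFFFILjava/lang/String;)V");

    for (const Object &obj : objects) {
        jstring name = env->NewStringUTF(class_names[obj.label].c_str());
        jobject item = env->NewObject(itemCls, itemCtor,
                                      obj.x, obj.y, obj.w, obj.h,
                                      obj.prob, obj.label, name);
        env->CallBooleanMethod(result, listAdd, item);
        env->DeleteLocalRef(item);
        env->DeleteLocalRef(name);
    }
    return result;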