This article uses OpenCV 2.4.10 on Android to locate the QR code area before decoding with ZXing. It is mainly a summary of my own learning; for now I am only posting the code, and I will fill in the details of the algorithm when I have time.
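Since the detailed algorithm write-up is deferred, here is a minimal sketch of the positioning pipeline that the Activity below runs on each captured photo: shrink the image, apply an adaptive Gaussian threshold, find contours, then keep only contours whose area is plausible and whose minimum-area rectangle fills roughly half of its enclosing circle (pi/2, about 1.57). It is written as a plain OpenCV 2.4 Java program so it can be tried off-device; the class name, the input/output paths, and the use of rect.size.area() in place of the Converters round-trip used in the full code are my own choices, not from the original post.

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.*;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;

public class QrRegionSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);  // load the OpenCV native library

        Mat gray = Highgui.imread("input.jpg", 0);     // grayscale copy for thresholding
        Mat color = Highgui.imread("input.jpg", 1);    // color copy for drawing the result

        // 1. Shrink both images to 20% to speed up thresholding and contour search.
        Mat small = new Mat(), bin = new Mat(), colorSmall = new Mat();
        Imgproc.resize(gray, small, new Size(gray.cols() * 0.2, gray.rows() * 0.2));
        Imgproc.resize(color, colorSmall, new Size(color.cols() * 0.2, color.rows() * 0.2));

        // 2. Adaptive (Gaussian) threshold, then find all contours.
        Imgproc.adaptiveThreshold(small, bin, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
                Imgproc.THRESH_BINARY, 35, 5);
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Imgproc.findContours(bin, contours, new Mat(), Imgproc.RETR_LIST,
                Imgproc.CHAIN_APPROX_NONE);

        // 3. Keep contours whose area lies in a plausible range and whose minimum-area
        //    rectangle fills at least about half of the enclosing circle, which rejects
        //    elongated or ragged shapes.
        for (int i = contours.size() - 1; i >= 0; i--) {
            MatOfPoint2f pts = new MatOfPoint2f(contours.get(i).toArray());
            RotatedRect rect = Imgproc.minAreaRect(pts);
            Point center = new Point();
            float[] radius = {0};
            Imgproc.minEnclosingCircle(pts, center, radius);
            double area = Imgproc.contourArea(contours.get(i));
            if (area < 300 || area > 3000
                    || rect.size.area() < radius[0] * radius[0] * 1.57) {
                contours.remove(i);
            }
        }

        // 4. Draw the surviving contours on the small color image and save it.
        Imgproc.drawContours(colorSmall, contours, -1, new Scalar(255, 0, 0));
        Highgui.imwrite("result.jpg", colorSmall);
    }
}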
Activity class Java file
// The package prefix and import list in the original post were unreadable;
// the package name below is assumed and the imports are reconstructed from the code.
package com.example.android_capture;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.*;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import org.opencv.utils.Converters;

import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View;

public class capture extends Activity {

    private SurfaceView picSV;
    private Camera camera;
    private String strPicPath;

    // Callback invoked after the OpenCV class library has been loaded and initialized
    // successfully; we do not perform any extra operations here.
    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                } break;
                default: {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };

    @SuppressWarnings("deprecation")
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        setContentView(R.layout.activity_capture);       // layout name lost in the original; assumed here
        picSV = (SurfaceView) findViewById(R.id.picSV);  // view id lost in the original; assumed here
        picSV.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        picSV.getHolder().addCallback(new MyCallback());
    }

    private class MyCallback implements Callback {
        // When the SurfaceView is created we open the camera, attach the preview to the
        // SurfaceView, set the picture parameters and start the preview.
        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            try {
                camera = Camera.open();                          // open the camera
                camera.setPreviewDisplay(picSV.getHolder());     // preview into picSV
                Parameters params = camera.getParameters();      // get the camera parameters
                params.setPictureSize(800, 480);                 // picture size 800*480
                params.setPreviewSize(800, 480);                 // preview size 800*480
                params.setFlashMode("auto");                     // automatic flash
                params.setJpegQuality(50);                       // JPEG quality 50
                camera.setParameters(params);                    // apply the parameters above
                camera.startPreview();                           // start previewing; we can now take photos
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            // When the SurfaceView is destroyed we stop the preview and release the camera.
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    public void takepic(View v) {
        // Autofocus before taking the picture.
        camera.autoFocus(new MyAutoFocusCallback());
    }

    private class MyAutoFocusCallback implements AutoFocusCallback {
        @Override
        public void onAutoFocus(boolean success, Camera camera) {
            // Take the picture once focusing has finished.
            camera.takePicture(null, null, null, new MyPictureCallback());
        }
    }

    private class MyPictureCallback implements PictureCallback {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            try {
                Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
                Matrix matrix = new Matrix();
                matrix.postRotate(90);
                bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
                // The file name was lost in the original; System.currentTimeMillis() is assumed here.
                strPicPath = Environment.getExternalStorageDirectory() + "/1Zxing/" + System.currentTimeMillis() + ".jpg";
                FileOutputStream fos = new FileOutputStream(strPicPath);
                bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
                fos.close();
                Handler mHandler = new Handler();
                mHandler.post(mRunnable);
                camera.startPreview();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        int action = event.getAction();
        if (MotionEvent.ACTION_UP == action) {
            camera.autoFocus(new MyAutoFocusCallback1());
        }
        return true;
    }

    private class MyAutoFocusCallback1 implements AutoFocusCallback {
        @Override
        public void onAutoFocus(boolean success, Camera camera) {
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        // Load and initialize the OpenCV class library through the OpenCV engine service.
        // The so-called OpenCV engine service is the OpenCV_2.4.3.2_Manager_2.4_*.apk package
        // found in the apk directory of the OpenCV installation package.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_10, this, mLoaderCallback);
    }

    Runnable mRunnable = new Runnable() {
        public void run() {
            List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
            String strMissingTime = null;
            Mat srcColor = new Mat(), srcColorResize = new Mat();
            Mat srcGray = new Mat(), srcGrayResize = new Mat(), srcGrayResizeThresh = new Mat();
            srcGray = Highgui.imread(strPicPath, 0);
            srcColor = Highgui.imread(strPicPath, 1);
            Imgproc.resize(srcGray, srcGrayResize, new Size(srcGray.cols() * 0.2, srcGray.rows() * 0.2));
            Imgproc.resize(srcColor, srcColorResize, new Size(srcColor.cols() * 0.2, srcColor.rows() * 0.2));

            long start = System.currentTimeMillis();
            // Binarization plus contour search
            Imgproc.adaptiveThreshold(srcGrayResize, srcGrayResizeThresh, 255,
                    Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 35, 5);
            Imgproc.findContours(srcGrayResizeThresh, contours, new Mat(),
                    Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);
            long end = System.currentTimeMillis();
            strMissingTime = String.valueOf(end - start);
            strMissingTime = strMissingTime + "\r";

            // Contour filtering and drawing
            for (int i = contours.size() - 1; i >= 0; i--) {
                MatOfPoint2f NewMtx = new MatOfPoint2f(contours.get(i).toArray());
                RotatedRect rotRect = Imgproc.minAreaRect(NewMtx);
                Point vertices[] = new Point[4];
                rotRect.points(vertices);
                List<Point> rectArea = new ArrayList<Point>();
                for (int n = 0; n < 4; n++) {
                    Point temp = new Point();
                    temp.x = vertices[n].x;
                    temp.y = vertices[n].y;
                    rectArea.add(temp);
                }
                Mat rectMat = Converters.vector_Point_to_Mat(rectArea);
                double minRectArea = Imgproc.contourArea(rectMat);
                Point center = new Point();
                float radius[] = {0};
                Imgproc.minEnclosingCircle(NewMtx, center, radius);
                if (Imgproc.contourArea(contours.get(i)) < 300
                        || Imgproc.contourArea(contours.get(i)) > 3000
                        || minRectArea < radius[0] * radius[0] * 1.57)
                    contours.remove(i);
            }
            Imgproc.drawContours(srcColorResize, contours, -1, new Scalar(255, 0, 0));
            // The output file names were lost in the original; "_result.jpg" and "time.txt" are assumed here.
            Highgui.imwrite(Environment.getExternalStorageDirectory() + "/1Zxing/"
                    + System.currentTimeMillis() + "_result.jpg", srcColorResize);
            File file = new File(Environment.getExternalStorageDirectory() + "/1Zxing/", "time.txt");
            BufferedWriter out = null;
            try {
                out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file, true)));
                out.write(strMissingTime);
                out.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
}
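One practical note on the Activity above: every output path points into a 1Zxing directory on external storage, but nothing in the code creates that directory, so the FileOutputStream in onPictureTaken and the Highgui.imwrite call will fail if it does not already exist. Below is a minimal sketch of a helper you could call from onCreate; the method name and log tag are my own, not from the original post.

    // Minimal sketch (not from the original article): create the "1Zxing" output
    // directory on external storage before the first photo is saved.
    private void ensureOutputDir() {
        java.io.File outDir = new java.io.File(
                android.os.Environment.getExternalStorageDirectory(), "1Zxing");
        if (!outDir.exists() && !outDir.mkdirs()) {
            android.util.Log.w("capture", "Could not create output directory: " + outDir);
        }
    }

Fully qualified names are used so the snippet drops into the Activity without extra imports; calling it at the end of onCreate is enough.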
Layout XML document (res/layout/activity_capture.xml; the file name was lost in the original and is assumed to match setContentView above)
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".MainActivity" >

    <!-- The SurfaceView id was lost in the original; "picSV" is assumed to match findViewById in the Activity. -->
    <SurfaceView
        android:id="@+id/picSV"
        android:layout_width="match_parent"
        android:layout_height="match_parent" >
    </SurfaceView>

    <ImageButton
        android:contentDescription="@string/desc"
        android:onClick="takepic"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="right|top"
        android:src="@android:drawable/ic_menu_camera" />

</FrameLayout>
String resources document (res/values/strings.xml)
<resources>
    <string name="app_name">Code</string>
    <string name="desc">Take picture button</string>
</resources>
Styles document (res/values/styles.xml). In theory this file is generated automatically; if the generated content has errors, you can refer to the version below.
<resources>

    <!--
        Base application theme, dependent on API level. This theme is replaced
        by AppBaseTheme from res/values-vXX/ on newer devices.
    -->
    <style name="AppBaseTheme" parent="android:Theme.Light">
        <!--
            Theme customizations available in newer API levels can go in
            res/values-vXX/, while customizations related to
            backward-compatibility can go here.
        -->
    </style>

    <!-- Application theme. -->
    <style name="AppTheme" parent="AppBaseTheme">
        <!-- All customizations that are NOT specific to a particular API-level can go here. -->
        <item name="android:windowNoTitle">true</item>
        <item name="android:windowFullscreen">true</item>
    </style>

</resources>
Manifest document (AndroidManifest.xml)
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.android_capture"
    android:versionCode="1"
    android:versionName="1.0" >

    <!-- The package prefix and the first permission name were lost in the original;
         com.example.android_capture and android.permission.CAMERA are assumed here. -->
    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

    <uses-sdk
        android:minSdkVersion="8"
        android:targetSdkVersion="19" />

    <application
        android:allowBackup="true"
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@style/AppTheme" >
        <activity android:name=".capture" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>
That is all for this article. I hope it is helpful for your study, and I hope you will continue to support me.