程式人生 > android studio + opencv 開發,objectMatch 攝像頭實時處理

android studio + opencv開發,objectMatch攝像頭實時處理

package com.example.wan.trycamerasurf;

import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.WindowManager;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.features2d.DMatch;
import org.opencv.features2d.DescriptorExtractor;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.FeatureDetector;
import org.opencv.features2d.Features2d;
import org.opencv.features2d.KeyPoint;
import org.opencv.highgui.Highgui;

import java.util.ArrayList;
import java.util.List;
public class MainActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2, View.OnTouchListener{

private static final String TAG = "MainActivity";
private CameraBridgeViewBase mOpenCvCameraView;
RunSURF runSURF = new RunSURF();
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
/* Now enable camera view to start receiving frames */
mOpenCvCameraView.setOnTouchListener(MainActivity.this);
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Log.d(TAG, "Creating and setting view");
mOpenCvCameraView = (CameraBridgeViewBase) new JavaCameraView(this,-1);
setContentView(mOpenCvCameraView);
mOpenCvCameraView.setCvCameraViewListener(this);
// runSURF = new RunSURF();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}

@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}

return super.onOptionsItemSelected(item);
}

@Override
public void onCameraViewStarted(int width, int height) {

}

@Override
public void onCameraViewStopped() {

}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
Mat img_object = Highgui.imread(Environment.getExternalStorageDirectory().getPath() + "/DCIM/Camera/lena2.png");
Mat img_scene_R = inputFrame.rgba();
Mat img_scene = inputFrame.gray();
Mat outMat = new Mat();
outMat = img_scene;
// System.out.println(img_scene);
FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
MatOfKeyPoint keypoints_object = new MatOfKeyPoint();
MatOfKeyPoint keypoints_scene = new MatOfKeyPoint();
detector.detect(img_object, keypoints_object);
detector.detect(img_scene, keypoints_scene);
DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
Mat descriptors_object = new Mat();
Mat descriptors_scene = new Mat();
extractor.compute(img_object, keypoints_object, descriptors_object);
extractor.compute(img_scene, keypoints_scene, descriptors_scene);
Mat imgKepoint = new Mat();
Features2d.drawKeypoints(img_object, keypoints_object, imgKepoint);
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
MatOfDMatch matches = new MatOfDMatch();
matcher.match(descriptors_object, descriptors_scene, matches);
// Mat img_matches = new Mat();
// Features2d.drawMatches(img_object, keypoints_object, img_scene, keypoints_scene, matches, img_matches);
double max_dist = 0;
double min_dist = 100;
//-- Quick calculation of max and min distances between keypoints
for (int i = 0; i < descriptors_object.rows(); i++) {
double dist = matches.toArray()[i].distance;
if (dist < min_dist) min_dist = dist;
if (dist > max_dist) max_dist = dist;
}
System.out.println("-- Max dist : " + max_dist);
System.out.println("-- Min dist : " + min_dist);
//-- Draw only "good" matches (i.e. whose distance is less than 3*min_dist )
MatOfDMatch good_matches = new MatOfDMatch();
for (int i = 0; i < descriptors_object.rows(); i++) {
if (matches.toArray()[i].distance < 3 * min_dist) {
//System.out.println(matches.toArray()[i].distance);
good_matches.push_back(matches.row(i));
}
}
MatOfPoint2f obj = new MatOfPoint2f();
MatOfPoint2f scene = new MatOfPoint2f();
List<Point> objp = new ArrayList<>();
List<Point> scenep = new ArrayList<>();
for (int i = 0; i < good_matches.rows(); i++) {
//-- Get the keypoints from the good matches
objp.add(keypoints_object.toArray()[good_matches.toArray()[i].queryIdx].pt);
scenep.add(keypoints_scene.toArray()[good_matches.toArray()[i].trainIdx].pt);
}
obj.fromList(objp);
scene.fromList(scenep);
Mat H = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, Calib3d.CALIB_CB_ADAPTIVE_THRESH);
System.out.println(H);
// Mat H = Calib3d.findHomography(obj,scene);
// Calib3d.findHomography(obj, scene);
//-- Get the corners from the image_1 ( the object to be "detected" )
MatOfPoint2f obj_corners = new MatOfPoint2f();
List<Point> obj_cornersp = new ArrayList<>();
Point point1 = new Point(0, 0);
obj_cornersp.add(point1);
Point point2 = new Point(img_object.cols(), 0);
obj_cornersp.add(point2);
Point point3 = new Point(img_object.cols(), img_object.rows());
obj_cornersp.add(point3);
Point point4 = new Point(0, img_object.rows());
obj_cornersp.add(point4);
obj_corners.fromList(obj_cornersp);
// System.out.println(obj_corners.toList());
MatOfPoint2f scene_corners = new MatOfPoint2f();
Core.perspectiveTransform(obj_corners, scene_corners, H);
// System.out.println(scene_corners.toList());
//-- Draw lines between the corners (the mapped object in the scene - image_2 )
Scalar scalar = new Scalar(0, 255, 0);
Point pp1 = new Point(scene_corners.toArray()[0].x , scene_corners.toArray()[0].y);
Point pp2 = new Point(scene_corners.toArray()[1].x , scene_corners.toArray()[1].y);
Point pp3 = new Point(scene_corners.toArray()[2].x , scene_corners.toArray()[2].y);
Point pp4 = new Point(scene_corners.toArray()[3].x , scene_corners.toArray()[3].y);
// System.out.println(pp1);
// System.out.println(pp2);
// System.out.println(pp3);
// System.out.println(pp4);
Core.line(outMat, pp1, pp2, scalar, 4);
Core.line(outMat, pp2, pp3, scalar, 4);
Core.line(outMat, pp3, pp4, scalar, 4);
Core.line(outMat, pp4, pp1, scalar, 4);
// return runSURF.run(inputFrame.gray());
// if (matches.rows()>0)
// return img_matches;
// else
// return img_scene;
return outMat;
}

@Override
public boolean onTouch(View v, MotionEvent event) {
return false;
}
}
即把上週在 Java 上實現的程式轉移到 Android 端,能實時地在攝像頭畫面中識別特定圖片;目前程式中只支援 lena2 這一幅。